Tuesday, 26 May 2020

NamedTuple Example in Python

import collections

# NamedTuple example: a Movie record with named fields
Movie = collections.namedtuple('Movie', ['MovieID', 'Title', 'Genres'])

M = Movie('1', 'Toy Story (1995)', "Animation|Children's|Comedy")

# fields can be read by position or by name
print(M[0])
print(M[1])
print(M.MovieID)
print(M.Title)
print(M.Genres)

1
Toy Story (1995)
1
Toy Story (1995)
Animation|Children's|Comedy
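
Beyond positional and attribute access, namedtuple instances also expose a few helper methods (_fields, _asdict, _replace). Here is a minimal sketch, reusing the Movie tuple from above, that is not part of the original example:

import collections

Movie = collections.namedtuple('Movie', ['MovieID', 'Title', 'Genres'])
M = Movie('1', 'Toy Story (1995)', "Animation|Children's|Comedy")

print(M._fields)                       # ('MovieID', 'Title', 'Genres')
print(M._asdict())                     # dict-like view of the record
print(M._replace(Title='Toy Story'))   # new tuple with one field changed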


#NamedTuple Example : #1

import collections

Movie = collections.namedtuple('Movie', ['MovieID', 'Title', 'Genres'])

def parseMovie(_row):
    # split a "::"-delimited movies.dat line into its three fields
    fields = _row.split("::")
    movieid = int(fields[0])
    title = fields[1]
    genres = fields[2]
    return Movie(movieid, title, genres)

print(parseMovie("1::Toy Story (1995)::Animation|Children's|Comedy"))

Answer:
Movie(MovieID=1, Title='Toy Story (1995)', Genres="Animation|Children's|Comedy")
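
As a small extension (not part of the original example), the same parser can be applied line by line to a whole MovieLens movies.dat file. The file path below is only a placeholder:

# Hypothetical helper: apply parseMovie to every line of a movies.dat file.
# The ml-1m data files are typically latin-1 encoded.
def loadMovies(path):
    movies = []
    with open(path, encoding='latin-1') as f:
        for line in f:
            line = line.strip()
            if line:
                movies.append(parseMovie(line))
    return movies

# movies = loadMovies("movies.dat")
# print(movies[0].Title)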


#NamedTuple Example : #2
import collections
from datetime import datetime

Rating = collections.namedtuple('Rating', ['UserID', 'MovieID', 'Rating', 'Timestamp'])

def parseRatingRecord(_row):
    # split a "::"-delimited ratings.dat line and convert the epoch timestamp
    fields = _row.split("::")
    userid = int(fields[0])
    movieid = int(fields[1])
    rating = int(fields[2])
    _timestamp = datetime.fromtimestamp(int(fields[3]))
    return Rating(userid, movieid, rating, _timestamp)

print(parseRatingRecord("1::1193::5::978300760"))

Answer:
Rating(UserID=1, MovieID=1193, Rating=5, Timestamp=datetime.datetime(2001, 1, 1, 3, 42, 40))
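
Once rating rows are parsed into Rating tuples, the named fields make simple aggregations easy to read. The sketch below (not from the original post) computes the average rating per movie; the sample rows are made up purely for illustration:

# Hypothetical sample rows, made up for illustration
rows = [
    "1::1193::5::978300760",
    "2::1193::4::978298413",
    "1::661::3::978302109",
]

ratings = [parseRatingRecord(r) for r in rows]

# average rating per MovieID, using the named fields for readability
totals = {}
for r in ratings:
    total, count = totals.get(r.MovieID, (0, 0))
    totals[r.MovieID] = (total + r.Rating, count + 1)

for movieid, (total, count) in totals.items():
    print(movieid, total / count)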


#Example 3

import collections

User = collections.namedtuple('User', ['UserID', 'Gender', 'Age', 'Occupation', 'ZipCode'])

def parseUserRecord(_row):
    # split a "::"-delimited users.dat line
    fields = _row.split("::")
    userid = int(fields[0])
    gender = fields[1]
    age = int(fields[2])
    occupation = int(fields[3])
    # note: int() drops leading zeros and fails on hyphenated ZIP codes such as "98107-2117"
    zipcode = int(fields[4])
    return User(userid, gender, age, occupation, zipcode)

print(parseUserRecord("3::M::25::15::55117"))
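
Answer:
User(UserID=3, Gender='M', Age=25, Occupation=15, ZipCode=55117)

On Python 3.6+, the same record types can also be declared with typing.NamedTuple, which adds type hints. A minimal sketch of the user record in that style (an alternative spelling, not part of the original example; the class name is hypothetical):

from typing import NamedTuple

class UserRecord(NamedTuple):   # hypothetical name, equivalent to User above
    UserID: int
    Gender: str
    Age: int
    Occupation: int
    ZipCode: str   # kept as str so hyphenated ZIPs like "98107-2117" still parse

print(UserRecord(3, 'M', 25, 15, '55117'))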
                               
                               

