@@ -67,6 +67,14 @@ def getImage(soup, url):
67
67
return res
68
68
69
69
70
# print dictionary
def printData(data):
    """Pretty-print a link-preview record to stdout.

    Parameters
    ----------
    data : dict
        Expects the keys "title", "description", "url" and "image",
        as produced by the preview-building code below.
    """
    print("\nTitle : ", data["title"])
    print("Description : ", data["description"])
    print("URL : ", data["url"])
    print("Image link : ", data["image"])
70
78
# start: print the tool's banner before any work happens
print("\n======================")
print("- Link Preview -")
@@ -91,25 +99,31 @@ def getImage(soup, url):
91
99
f .write ("{}" )
92
100
f .close ()
93
101
102
# read db: the on-disk JSON cache mapping url -> preview record
with open('Link-Preview/db.json', 'r') as file:
    db = json.load(file)

# check if it exists: serve a cached preview without any network request
if url in db:
    # cached entries have the same shape as newData below, so reuse the
    # same pretty-printer instead of dumping the raw dict
    printData(db[url])
else:
    # if not in db get via request

    # getting the html
    r = requests.get(url)
    soup = BeautifulSoup(r.text, "html.parser")

    # printing data
    newData = {
        "title": getTitle(soup),
        "description": getDesc(soup),
        "url": url,
        "image": getImage(soup, url)
    }
    printData(newData)

    # persist the new entry back to the db file
    db[url] = newData
    with open('Link-Preview/db.json', 'w') as file:
        json.dump(db, file)

# NOTE(review): END banner printed unconditionally — confirm it should not
# live inside the else branch (indentation was lost in the diff view)
print("\n--END--\n")