
Commit 126973f

Merge pull request #2 from avinashkranjan/master
Updated the repo
2 parents c9a576d + 7fb586a commit 126973f

File tree

519 files changed: +126378 −3534 lines changed


‎.all-contributorsrc‎

Lines changed: 0 additions & 676 deletions
This file was deleted.

‎.gitignore‎

Lines changed: 11 additions & 2 deletions
@@ -1,4 +1,3 @@
-
 ### JetBrains template
 
 # User-specific stuff
@@ -668,5 +667,15 @@ pip-selfcheck.json
 
 ### JupyterNotebooks template
 
-
 */.ipynb_checkpoints/*
+
+### User Added Files
+
+geoip/
+/venv
+/__pycache__
+.idea/
+test.py
+Test/
+reddit_tokens.json
+scriptcopy.py

‎Amazon-Price-Alert/README.md‎

Lines changed: 4 additions & 0 deletions
@@ -27,6 +27,10 @@ Do remember to install the dependencies in the requirements.txt file!
 - requests_html
 - BeautifulSoup
 
+## Output
+
+![](https://i.postimg.cc/2ScYTnr4/screenshot.png)
+
 ## Development Status
 
 This scrapper is complete. A future version may have emails sent via a server.

‎Amazon-Price-Alert/amazon_scraper.py‎

Lines changed: 14 additions & 16 deletions
@@ -43,11 +43,11 @@ def get_title(self):
     # Stores the price of the product after filtering the string and
     # converting it to an integer
     def get_price(self):
-        price_raw = self.soup.find(
-            'span', id='priceblock_ourprice').text.strip()
+        price_raw = self.soup.find('span',
+                                   id='priceblock_ourprice').text.strip()
         price_filtered = price_raw[2:len(price_raw) - 3]
-        self.product_price = int(
-            ''.join([x for x in price_filtered if x != ',']))
+        self.product_price = int(''.join(
+            [x for x in price_filtered if x != ',']))
         return
 
     # Prints product title
@@ -116,8 +116,7 @@ def send_email(self):
 
 
 def main():
-    url = input(
-        "Paste the link of the Amazon product:")
+    url = input("Paste the link of the Amazon product:")
     budget = int(input("Enter you budget price:"))
     u_email = input("Enter your email:")
     inp_str = ("How frequuently would you like to check the price?"
@@ -130,16 +129,15 @@ def main():
         time_delay = 3 * 60 * 60
     else:
         time_delay = 6 * 60 * 60
-    msg = (
-        "Great! Now just sit back and relax."
-        "Minimize this program and be sure "
-        "that it is running.\nAdditionally, ensure that there"
-        "is stable internet connection "
-        "during the time this program runs.\nIf the price of the "
-        "product falls within your budget, "
-        "you will recieve an email regarding the same and this"
-        "program will auto-close.\nThank you for using "
-        "C3PO scraper! Beep-bop bop-beep.")
+    msg = ("Great! Now just sit back and relax."
+           "Minimize this program and be sure "
+           "that it is running.\nAdditionally, ensure that there"
+           "is stable internet connection "
+           "during the time this program runs.\nIf the price of the "
+           "product falls within your budget, "
+           "you will recieve an email regarding the same and this"
+           "program will auto-close.\nThank you for using "
+           "C3PO scraper! Beep-bop bop-beep.")
     print(msg)
     c3po = Scraper(url, budget, u_email)
     while True:
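The get_price change above is a pure re-wrap of the same parsing logic: slice off the currency prefix and the trailing paise digits, drop the thousands separators, and convert the result to an int. A minimal standalone sketch of that logic, using a made-up price string rather than a live Amazon page:

price_raw = "₹ 1,29,999.00"  # hypothetical sample; the real value comes from the 'priceblock_ourprice' span
price_filtered = price_raw[2:len(price_raw) - 3]  # drop "₹ " and ".00" -> "1,29,999"
product_price = int(''.join([x for x in price_filtered if x != ',']))
print(product_price)  # -> 129999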

‎Amazon-Price-Alert/screenshot.png‎

-10.4 KB
Binary file not shown.

‎Amazon Price Alert/Amazon-Price-Tracker/amazonprice.py‎ renamed to ‎Amazon-Price-Tracker/amazonprice.py‎

Lines changed: 29 additions & 18 deletions
@@ -11,16 +11,12 @@
 
 # get your browser information by searching "my user agent"
 user_agent = input("Enter your User-Agent string here\n")
-headers = {
-    "User-Agent": f'{user_agent}'
-
-}
+headers = {"User-Agent": f'{user_agent}'}
 Url = input("Drop the Url of product you wish to buy...!\n")
 
 page = requests.get(Url, headers=headers)
 soup = BeautifulSoup(page.content, "html.parser")
 
-
 # print(soup)
 
 
@@ -41,26 +37,39 @@ def mail_sending(mail_id, title, password):
 def check_price():
     title = soup.find(id="productTitle").get_text().strip()
     try:
-        price = soup.find(id="priceblock_ourprice_row").get_text().strip()[:20].replace('₹', '').replace(' ',
-                                                                                                          '').replace(
-            'Price:', '').replace('\n', '').replace('\xa0', '').replace(',', '').replace('Fu', '')
+        price = soup.find(
+            id="priceblock_ourprice_row").get_text().strip()[:20].replace(
+                '₹', '').replace(' ', '').replace('Price:', '').replace(
+                    '\n', '').replace('\xa0',
+                                      '').replace(',', '').replace('Fu', '')
 
     except:
         try:
-            price = soup.find(id="priceblock_dealprice").get_text().strip()[:20].replace('₹', '').replace(' ',
-                                                                                                           '').replace(
-                'Price:', '').replace('\n', '').replace('\xa0', '').replace(',', '').replace('Fu', '')
+            price = soup.find(
+                id="priceblock_dealprice").get_text().strip()[:20].replace(
+                    '₹', '').replace(' ', '').replace('Price:', '').replace(
+                        '\n', '').replace('\xa0',
+                                          '').replace(',',
+                                                      '').replace('Fu', '')
 
         except:
            try:
-                price = soup.find(id="priceblock_ourprice").get_text().strip()[:20].replace('₹', '').replace(' ',
-                                                                                                              '').replace(
-                    'Price:', '').replace('\n', '').replace('\xa0', '').replace(',', '').replace('Fu', '')
+                price = soup.find(
+                    id="priceblock_ourprice").get_text().strip()[:20].replace(
+                        '₹',
+                        '').replace(' ', '').replace('Price:', '').replace(
+                            '\n',
+                            '').replace('\xa0',
+                                        '').replace(',', '').replace('Fu', '')
 
             except:
-                price = soup.find(id="priceblock_ourprice_lbl").get_text().strip()[:20].replace('₹', '').replace(' ',
-                                                                                                                  '').replace(
-                    'Price:', '').replace('\n', '').replace('\xa0', '').replace(',', '').replace('Fu', '')
+                price = soup.find(id="priceblock_ourprice_lbl").get_text(
+                ).strip()[:20].replace('₹', '').replace(' ', '').replace(
+                    'Price:',
+                    '').replace('\n',
+                                '').replace('\xa0',
+                                            '').replace(',',
+                                                        '').replace('Fu', '')
 
     fixed_price = float(price)
     print(title)
@@ -69,7 +78,9 @@ def check_price():
     your_price = y_price.replace(',', '')
     mail_id = input("Please enter your email id: ")
     password = input("Enter your app password here: ")
-    print("Thank You! You'll receive an email as soon as the price of product drops...!")
+    print(
+        "Thank You! You'll receive an email as soon as the price of product drops...!"
+    )
     # print(price)
     if fixed_price <= float(your_price):
         mail_sending(mail_id, title, password)
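The reformatted check_price above still probes the four known price element ids through nested try/except blocks, repeating the same chain of replace calls in each branch. A hypothetical refactor (not part of this commit) that expresses the same lookup as a loop, with the ids and clean-up tokens taken from the script itself:

def extract_price(soup):
    # Try each known Amazon price element id in turn and clean the first match.
    for element_id in ("priceblock_ourprice_row", "priceblock_dealprice",
                       "priceblock_ourprice", "priceblock_ourprice_lbl"):
        tag = soup.find(id=element_id)
        if tag is None:
            continue
        text = tag.get_text().strip()[:20]
        for junk in ('₹', ' ', 'Price:', '\n', '\xa0', ',', 'Fu'):
            text = text.replace(junk, '')
        return float(text)
    raise ValueError("no known price element found on the page")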

‎Anime-Tracker/anime_tracker.py‎

Lines changed: 14 additions & 9 deletions
@@ -1,5 +1,3 @@
-
-
 try:
     import requests
     from bs4 import BeautifulSoup
@@ -12,18 +10,23 @@
 
 # mainly bs4 lib is used for extracting html from web pages
 
+
 def details(soup):
 
-    info = soup.find('div', {'class': 'pure-1 md-3-5'})  # selecting div with class pure...
-    print("\nAbout the Anime : \n", "\t\t", info.find('p').getText(), "\n")  # now extracting the text for p tag of the div
+    # selecting div with class pure...
+    info = soup.find('div', {'class': 'pure-1 md-3-5'})
+    # now extracting the text for p tag of the div
+    print("\nAbout the Anime : \n", "\t\t", info.find('p').getText(), "\n")
 
     total_episodes = soup.find('div', {'class': 'pure-1 md-1-5'})
     print("\nTotal number of episodes :\t",
-          re.sub("[^0-9]", "", total_episodes.find('span').getText()))  # usimg regex for only selecting numbers
+          re.sub(
+              "[^0-9]", "",
+              total_episodes.find(
                  'span').getText()))  # usimg regex for only selecting numbers
 
     Active_years = soup.find('span', {'class': 'iconYear'})
-    print("\n Years Active (From-To)\t:\t",
-          Active_years.getText(), "-\n")
+    print("\n Years Active (From-To)\t:\t", Active_years.getText(), "-\n")
 
     rating = soup.find('div', {'class': 'avgRating'})
     print("Rating : ", rating.find('span').getText())
@@ -42,7 +45,8 @@ def details(soup):
 def entry():
     print("\nType complete name>>\n")
     anime_name = input(
-        "[+] Enter the name of the Anime : ").strip().title().replace(" ", "-")
+        "[+] Enter the name of the Anime : ").strip().title().replace(
+            " ", "-")
 
     print("\n")
     print(anime_name)
@@ -51,7 +55,8 @@ def entry():
     source_code = requests.get(search_url)
     content = source_code.content
     global soup
-    soup = BeautifulSoup(content, features="html.parser")  # to parse the selectd HTML
+    # to parse the selectd HTML
+    soup = BeautifulSoup(content, features="html.parser")
     # print(soup.prettify)
 
     try:
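For reference, the re.sub call that gets re-wrapped above simply strips every non-digit character from the episode-count text, which is what lets the script print a bare number. A one-line sketch with a made-up input string:

import re

episode_text = "Episodes: 24 (ongoing)"  # hypothetical sample; the real text comes from the episode-count span
print(re.sub("[^0-9]", "", episode_text))  # -> "24"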

‎Anime-Tracker/anime_tracker_.png‎

-24.1 KB
Binary file not shown.

‎Anime-Tracker/readme.md‎

Lines changed: 7 additions & 3 deletions
@@ -1,5 +1,9 @@
-# this is a python script for giving information about the anime like information about anime , number of episodes released till date, Years active, Tags related to it.
+# Anime-Tracker
+
+This is a python script for giving information about the anime like information about anime , number of episodes released till date, Years active, Tags related to it.
+script is built with the help of
 
-# script is built with the help of
 $--- bs4 module (for web scraping)
-$ ---requests module
+$--- requests module
+
+![](https://i.postimg.cc/nc9qnGwL/anime-tracker.png)
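As the updated readme says, the tracker is built on the requests and bs4 modules. A minimal sketch of that fetch-and-parse pattern (the URL below is a placeholder; the div class is the one anime_tracker.py actually queries):

import requests
from bs4 import BeautifulSoup

page = requests.get("https://example.com/anime/some-title")  # placeholder URL for illustration
soup = BeautifulSoup(page.content, "html.parser")
info = soup.find('div', {'class': 'pure-1 md-3-5'})  # same selector as the script
if info is not None:
    print(info.find('p').getText())
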
Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+import cv2
+
+src1 = input("Enter the path of the image 1\n")  # getting the path for first image
+src1 = cv2.imread(src1)
+# src1 = cv2.resize(src1, (540, 540))  # resizing the image
+src2 = input("Enter the path of the image 2\n")  # getting the path for second image
+src2 = cv2.imread(src2)
+
+src2 = cv2.resize(src2, src1.shape[1::-1])  # Resizing the image so that both images have same dimensions
+andop = cv2.bitwise_and(src1, src2, mask=None)  # Applying Bitwise AND operation
+andop = cv2.resize(andop, (640, 640))
+cv2.imshow('Bitwise AND', andop)
+
+orop = cv2.bitwise_or(src1, src2, mask=None)  # Applying Bitwise OR operation
+orop = cv2.resize(orop, (640, 640))
+cv2.imshow('Bitwise OR', orop)
+
+xorop = cv2.bitwise_xor(src1, src2, mask=None)  # Applying Bitwise XOR operation
+xorop = cv2.resize(xorop, (640, 640))
+cv2.imshow('Bitwise XOR', xorop)
+cv2.waitKey(0)
+cv2.destroyAllWindows()
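One detail worth noting in the new script above: cv2's bitwise operations require both inputs to have identical shapes, and src1.shape[1::-1] reverses NumPy's (height, width) ordering into the (width, height) tuple that cv2.resize expects. A minimal sketch with synthetic arrays (the shapes are arbitrary, chosen only for illustration):

import cv2
import numpy as np

src1 = np.zeros((480, 640, 3), dtype=np.uint8)   # height=480, width=640
src2 = np.full((200, 300, 3), 255, dtype=np.uint8)

src2 = cv2.resize(src2, src1.shape[1::-1])       # resize to (width=640, height=480)
andop = cv2.bitwise_and(src1, src2, mask=None)   # shapes now match, so this succeeds
print(src1.shape, src2.shape, andop.shape)       # all (480, 640, 3)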

0 commit comments

