
Commit 6160e38

check solution presence

committed (1 parent: 2adc60e)

File tree

1 file changed (+9 -12 lines)


leetcode_scraper.py

Lines changed: 9 additions & 12 deletions
@@ -59,26 +59,20 @@ def scrape_problem(self, problem_slug):

         try:
             response = requests.post(self.graphql_url, headers=self.headers, json=query)
-
             if response.status_code != 200:
                 print(f"Failed to fetch problem: {problem_slug}. Status code: {response.status_code}")
                 return None
-
             data = response.json()
-            question = data.get('data', {}).get('question', {})
-
-            if not question:
-                print(f"No data found for problem: {problem_slug}")
+            print(f"[DEBUG] Raw API response for {problem_slug}: {json.dumps(data, indent=2)}")
+            question = data.get('data', {}).get('question', None)
+            if question is None:
+                print(f"No question data found for problem: {problem_slug}. Response structure may have changed or the slug is invalid.")
                 return None
-
             # Process the problem data
             problem_data = self._process_problem_data(question)
-
             # Save the problem data
             self._save_problem_data(problem_slug, problem_data)
-
             return problem_data
-
         except Exception as e:
             print(f"Error scraping problem {problem_slug}: {str(e)}")
             return None
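
The new `is None` check targets the case where the GraphQL API answers with an explicitly null question, for example when the slug is unknown or has been renamed. The payload shapes below are an assumption used for illustration, not taken from the commit; a minimal standalone sketch of how the guard behaves:

import json

# Hypothetical response bodies; the real API payload may differ.
valid = {"data": {"question": {"title": "Two Sum", "titleSlug": "two-sum"}}}
invalid = {"data": {"question": None}}  # e.g. an unknown or renamed slug

for data in (valid, invalid):
    question = data.get('data', {}).get('question', None)
    if question is None:
        # Mirrors the new early return in scrape_problem(): stop before
        # any attribute access on None further down the method.
        print("no question data:", json.dumps(data))
        continue
    print("got question:", question["titleSlug"])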
@@ -208,7 +202,10 @@ def _process_problem_data(self, question):
         problem_data['code_snippets'] = code_snippets

         # Extract solution content if available
-        solution_content = question.get('solution', {}).get('content')
+        solution = question.get('solution')
+        solution_content = None
+        if solution and isinstance(solution, dict):
+            solution_content = solution.get('content')
         if solution_content:
             solution_soup = BeautifulSoup(solution_content, 'html.parser')
             problem_data['solution'] = solution_soup.get_text(strip=True)
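
The guard added here matters because dict.get(key, default) only falls back to the default when the key is absent. If the API returns "solution": null (an assumption about the payload for problems without an official editorial, not something the commit states), the old chained call ends up invoking .get('content') on None. A minimal sketch of both behaviours on a plain dict:

# Hypothetical question payload with an explicit null solution.
question = {"title": "Two Sum", "solution": None}

# Old approach: the key exists, so .get('solution', {}) returns None,
# and the chained .get('content') raises AttributeError.
try:
    question.get('solution', {}).get('content')
except AttributeError as exc:
    print("old approach fails:", exc)

# New approach (as in the diff above): check the value first.
solution = question.get('solution')
solution_content = None
if solution and isinstance(solution, dict):
    solution_content = solution.get('content')
print("new approach yields:", solution_content)  # -> None, no exception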
@@ -260,7 +257,7 @@ def scrape_problem_list(self, limit=10):

 if __name__ == "__main__":
     scraper = LeetCodeScraper()
-    problem_data = scraper.scrape_problem("longest-strictly-increasing-or-strictly-decreasing-subarray")
+    problem_data = scraper.scrape_problem("list-the-products-ordered-in-a-period")
     print(json.dumps(problem_data, indent=2))
     # Option 2: Scrape multiple problems from the list
     # problem_list = scraper.scrape_problem_list(limit=5)
