Updated black causing some reformatting (#218)

Elias Freider
2023-02-06 16:13:33 +01:00
committed by GitHub
parent 01ca9f9b2a
commit c01ef3a054
3 changed files with 3 additions and 9 deletions


@@ -85,7 +85,7 @@ def detect_faces(fn, start, stop):
     for img in clip.iter_frames():
         gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
         faces = face_cascade.detectMultiScale(gray, 1.1, 4)
-        for (x, y, w, h) in faces:
+        for x, y, w, h in faces:
             cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
         imgs.append(img)
 
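For context on this hunk: the parentheses the new black version strips are purely cosmetic, since `for (x, y, w, h) in faces:` and `for x, y, w, h in faces:` unpack tuples identically. A minimal runnable sketch of the reformatted loop (the cascade file and input image are placeholders, not from this repo):

import cv2

# Placeholder inputs: any Haar cascade XML and any BGR image work here.
face_cascade = cv2.CascadeClassifier(
    cv2.data.haarcascades + "haarcascade_frontalface_default.xml"
)
img = cv2.imread("frame.png")

gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, 1.1, 4)  # same args as the diff
for x, y, w, h in faces:  # post-update style, no redundant parentheses
    cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)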


@@ -5,13 +5,12 @@ import dataclasses
 import json
 import time
 import urllib.request
+from typing import Any
 
 import modal
 
 from . import config
-from typing import Any
-
 
 rnn_image = modal.Image.debian_slim().pip_install(
     "keras",
     "pandas",
@@ -117,7 +116,6 @@ def generate_names(
         if len(gen_name) > MAX_NAME_LEN:
             continue
         elif len(gen_name) >= MIN_NAME_LEN:
-
             # Only allow new and unique names
             if gen_name not in training_names and gen_name not in new_names:
                 new_names.add(gen_name)
@@ -191,9 +189,8 @@ def train_rnn(
     training_names: list[str],
     max_sequence_len: int,
 ):
+    from keras.layers import LSTM, Dense
     from keras.models import Sequential
-    from keras.layers import Dense
-    from keras.layers import LSTM
     from keras.optimizers import RMSprop
 
     epochs = 100  # Number of times we train on our full data
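The keras change above only merges two `from keras.layers` imports into one; nothing behavioral moves. For reference, a hedged sketch of how these imports typically fit together (layer sizes and sequence shape are illustrative, not taken from this example):

from keras.layers import LSTM, Dense
from keras.models import Sequential
from keras.optimizers import RMSprop

# Illustrative shapes: length-10 sequences over a 27-symbol alphabet.
model = Sequential()
model.add(LSTM(128, input_shape=(10, 27)))
model.add(Dense(27, activation="softmax"))
model.compile(
    loss="categorical_crossentropy", optimizer=RMSprop(learning_rate=0.01)
)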


@@ -80,7 +80,6 @@ class NYArticle:
     secret=modal.Secret.from_name("nytimes"), image=stub["scraping_image"]
 )
 def latest_science_stories(n_stories: int = 5) -> List[NYArticle]:
-
     # query api for latest science articles
     params = {
         "api-key": os.environ["NYTIMES_API_KEY"],
@@ -116,7 +115,6 @@ def latest_science_stories(n_stories: int = 5) -> List[NYArticle]:
 
 
 @stub.function(image=stub["scraping_image"])
 def scrape_nyc_article(url: str) -> str:
-
     print(f"Scraping article => {url}")
     # fetch article; simulate desktop browser
@@ -151,7 +149,6 @@ def scrape_nyc_article(url: str) -> str:
     memory=4096,
 )
 def summarize_article(text: str) -> str:
-
     print(f"Summarizing text with {len(text)} characters.")
     # summarize text
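A common thread in this file: every hunk deletes the blank line that sat directly under a `def` line, consistent with the updated black removing empty lines at the start of a block. In miniature, with a hypothetical function (not from this repo):

import urllib.request

# Before the black update:
def fetch(url: str) -> str:

    return urllib.request.urlopen(url).read().decode()

# After: the empty first line of the body is removed.
def fetch(url: str) -> str:
    return urllib.request.urlopen(url).read().decode()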