export script

This commit is contained in:
2026-04-13 21:10:37 -03:00
parent 9ecc341da8
commit 236ab941a5
16 changed files with 2410 additions and 158 deletions

View File

@@ -10,7 +10,7 @@
<sourceFolder url="file://$MODULE_DIR$/service_bk" isTestSource="false" /> <sourceFolder url="file://$MODULE_DIR$/service_bk" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/.venv" /> <excludeFolder url="file://$MODULE_DIR$/.venv" />
</content> </content>
<orderEntry type="inheritedJdk" /> <orderEntry type="jdk" jdkName="Python 3.12 (export-sereia-guardia-fastapi)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
</component> </component>
</module> </module>

2
.idea/misc.xml generated
View File

@@ -3,5 +3,5 @@
<component name="Black"> <component name="Black">
<option name="sdkName" value="Python 3.12 (export-sereia-guardia-tray)" /> <option name="sdkName" value="Python 3.12 (export-sereia-guardia-tray)" />
</component> </component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (export-sereia-guardia-tray)" project-jdk-type="Python SDK" /> <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (export-sereia-guardia-fastapi)" project-jdk-type="Python SDK" />
</project> </project>

28
.idea/workspace.xml generated
View File

@@ -5,7 +5,12 @@
</component> </component>
<component name="ChangeListManager"> <component name="ChangeListManager">
<list default="true" id="8dec87a3-1d37-4987-9cda-facb38ae979f" name="Changes" comment="Docker"> <list default="true" id="8dec87a3-1d37-4987-9cda-facb38ae979f" name="Changes" comment="Docker">
<change beforePath="$PROJECT_DIR$/Dockerfile" beforeDir="false" afterPath="$PROJECT_DIR$/Dockerfile" afterDir="false" /> <change beforePath="$PROJECT_DIR$/.idea/export-sereia-guardia-fastapi.iml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/export-sereia-guardia-fastapi.iml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/misc.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/misc.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/db/helpers.py" beforeDir="false" afterPath="$PROJECT_DIR$/db/helpers.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docker-compose.yml" beforeDir="false" afterPath="$PROJECT_DIR$/docker-compose.yml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/main.py" beforeDir="false" afterPath="$PROJECT_DIR$/main.py" afterDir="false" />
</list> </list>
<option name="SHOW_DIALOG" value="false" /> <option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" /> <option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -29,7 +34,6 @@
&quot;associatedIndex&quot;: 5 &quot;associatedIndex&quot;: 5
}</component> }</component>
<component name="ProjectId" id="2kQs1a5QFZfnTQGZJLgbgFkUYJX" /> <component name="ProjectId" id="2kQs1a5QFZfnTQGZJLgbgFkUYJX" />
<component name="ProjectLevelVcsManager" settingsEditedManually="true" />
<component name="ProjectViewState"> <component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" /> <option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" /> <option name="showLibraryContents" value="true" />
@@ -37,14 +41,18 @@
<component name="PropertiesComponent"><![CDATA[{ <component name="PropertiesComponent"><![CDATA[{
"keyToString": { "keyToString": {
"Python.main (1).executor": "Run", "Python.main (1).executor": "Run",
"Python.main.executor": "Run", "Python.main.executor": "Debug",
"Python.order_parser.executor": "Run", "Python.order_parser.executor": "Run",
"Python.products_sold_parser.executor": "Run", "Python.products_sold_parser.executor": "Run",
"RunOnceActivity.OpenProjectViewOnStart": "true", "RunOnceActivity.OpenProjectViewOnStart": "true",
"RunOnceActivity.ShowReadmeOnStart": "true", "RunOnceActivity.ShowReadmeOnStart": "true",
"RunOnceActivity.git.unshallow": "true",
"SHARE_PROJECT_CONFIGURATION_FILES": "true", "SHARE_PROJECT_CONFIGURATION_FILES": "true",
"git-widget-placeholder": "master",
"ignore.virus.scanning.warn.message": "true", "ignore.virus.scanning.warn.message": "true",
"last_opened_file_path": "C:/Users/chris/Documents/Projects/export-sereia-guardia-fastapi" "last_opened_file_path": "C:/Users/chris/Documents/Projects/export-sereia-guardia-fastapi",
"settings.editor.selected.configurable": "com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable",
"settings.editor.splitter.proportion": "0.28409094"
} }
}]]></component> }]]></component>
<component name="RecentsManager"> <component name="RecentsManager">
@@ -183,7 +191,7 @@
<component name="SharedIndexes"> <component name="SharedIndexes">
<attachedChunks> <attachedChunks>
<set> <set>
<option value="bundled-python-sdk-50da183f06c8-d3b881c8e49f-com.jetbrains.pycharm.community.sharedIndexes.bundled-PC-233.13135.95" /> <option value="bundled-python-sdk-890ed5b35930-d9c5bdb153f4-com.jetbrains.pycharm.pro.sharedIndexes.bundled-PY-251.23774.444" />
</set> </set>
</attachedChunks> </attachedChunks>
</component> </component>
@@ -268,7 +276,15 @@
<option name="project" value="LOCAL" /> <option name="project" value="LOCAL" />
<updated>1737832700185</updated> <updated>1737832700185</updated>
</task> </task>
<option name="localTasksCounter" value="10" /> <task id="LOCAL-00010" summary="Docker">
<option name="closed" value="true" />
<created>1737832825112</created>
<option name="number" value="00010" />
<option name="presentableId" value="LOCAL-00010" />
<option name="project" value="LOCAL" />
<updated>1737832825112</updated>
</task>
<option name="localTasksCounter" value="11" />
<servers /> <servers />
</component> </component>
<component name="Vcs.Log.Tabs.Properties"> <component name="Vcs.Log.Tabs.Properties">

View File

@@ -6,7 +6,7 @@ from db.models import Base, Order, OrderStatus
def get_engine(): def get_engine():
return create_engine('postgresql+psycopg2://postgres:guardia123@192.168.1.202:5432/export-tray', echo=False) return create_engine('postgresql+psycopg2://postgres:guardia123@192.168.1.204:5432/export-tray', echo=False)
def create_tables(): def create_tables():
@@ -31,6 +31,8 @@ def drop_tables():
Base.metadata.drop_all(engine) Base.metadata.drop_all(engine)
def insert_into_table(values_map, table_name): def insert_into_table(values_map, table_name):
engine = get_engine() engine = get_engine()
stmt = insert(table_name).values(values_map) stmt = insert(table_name).values(values_map)
@@ -49,3 +51,4 @@ def update_record(where_map, values_map, table_name):
result = conn.execute(stmt) result = conn.execute(stmt)
print(result) print(result)
conn.commit() conn.commit()

View File

@@ -8,6 +8,17 @@ from sqlalchemy.sql import func
class Base(DeclarativeBase): class Base(DeclarativeBase):
pass pass
class TrayWebhook(Base):
    # Raw storage of incoming Tray webhook payloads (kept for auditing/replay).
    __tablename__ = 'tray_webhook'

    # Surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    #create_date: Mapped[datetime] = mapped_column(DateTime, onupdate=func.now(), server_default=func.now())
    # NOTE(review): callers store the JSON-dumped webhook body here; 255 chars
    # may truncate larger payloads — confirm against real webhook sizes.
    body = Column(String(255))

    def __init__(self, body):
        # body: pre-serialized JSON string of the webhook request payload.
        self.body = body
class ApiToken(Base): class ApiToken(Base):
__tablename__ = 'api_token' __tablename__ = 'api_token'

View File

@@ -1,10 +1,43 @@
services: services:
fastapi-app: # api:
build: # container_name: sg-api
context: . # build:
dockerfile: Dockerfile # context: .
ports: # dockerfile: Dockerfile
- "8000:8000" # ports:
# - "8000:8000"
# volumes:
# - .:/app # Mount the local directory to the container for development
# command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload
# networks:
# - sg-net
postgres:
image: postgres
container_name: sg-postgres
restart: unless-stopped
environment:
POSTGRES_PASSWORD: guardia123
POSTGRES_DB: export-tray
volumes: volumes:
- .:/app # Mount the local directory to the container for development - ./postgres:/var/lib/postgresql/data
command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload networks:
- sg-net
ports:
- "5432:5432"
pgadmin:
image: dpage/pgadmin4
environment:
PGADMIN_DEFAULT_EMAIL: christoph.califice@hotmail.com
PGADMIN_DEFAULT_PASSWORD: asdf1234
PGADMIN_LISTEN_PORT: 80
user: "1002:1003"
ports:
- "15432:80"
volumes:
- pgadmin:/var/lib/pgadmin
networks:
sg-net:

38
import_json.py Normal file
View File

@@ -0,0 +1,38 @@
import json
import csv
INPUT_FILE = "orders.json"
OUTPUT_FILE = "orders_separated.csv"


def main(input_file=INPUT_FILE, output_file=OUTPUT_FILE):
    """Flatten a JSON array of order objects into a CSV file.

    Nested "Customers"/"ProductsSold" values are re-serialized to JSON
    strings so each order fits on a single CSV row.

    Args:
        input_file: path to a JSON file containing a top-level array
            (defaults to INPUT_FILE for backward compatibility).
        output_file: path of the CSV file to write.

    Raises:
        ValueError: if the JSON document is not a top-level array.
    """
    # Load JSON array
    with open(input_file, "r", encoding="utf-8") as f:
        data = json.load(f)

    # Ensure it's a list
    if not isinstance(data, list):
        raise ValueError("JSON file must contain a top-level array of objects.")

    # Convert nested fields to JSON strings so they survive as single cells.
    for item in data:
        if "Customers" in item:
            item["Customers"] = json.dumps(item["Customers"], ensure_ascii=False)
        if "ProductsSold" in item:
            item["ProductsSold"] = json.dumps(item["ProductsSold"], ensure_ascii=False)

    # Collect all possible keys to ensure a full CSV header. Sorted so the
    # column order is deterministic (the original iterated a set, whose
    # order varies between runs).
    all_keys = set()
    for item in data:
        all_keys.update(item.keys())
    fieldnames = sorted(all_keys)

    # Write CSV; rows missing a key get an empty cell via DictWriter's restval.
    with open(output_file, "w", newline="", encoding="utf-8") as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(data)

    print(f"Saved {len(data)} rows to {output_file}")


if __name__ == "__main__":
    main()

56
import_requests.py Normal file
View File

@@ -0,0 +1,56 @@
import requests
import csv
import time
# NOTE(review): the access token is hard-coded in the URL; move it to an
# environment variable or secret store before committing/sharing this script.
API_URL = "https://www.sereiaguardia.com.br/web_api/orders/:orderId/complete?access_token=APP_ID-6197-STORE_ID-1018776-7a249468d35d27e5b151a11228bc88a1d2db55edb59b4a6477ec9b04e7aa703c"
OUTPUT_CSV = "orders.csv"
# Fix: the original comment claimed "60 req/min = 1 req/sec", which
# contradicts the value. 0.5 s between requests is ~120 req/min.
RATE_LIMIT_SECONDS = 0.5

# Optional authentication headers sent with every request.
HEADERS = {
    # "Authorization": "Bearer TOKEN",
    # "Accept": "application/json"
}


def fetch_order(order_id):
    """Fetch a single order by id and return the decoded JSON payload.

    Raises:
        requests.exceptions.HTTPError: when the API answers a non-2xx status.
    """
    url = API_URL.replace(":orderId", str(order_id))
    response = requests.get(url, headers=HEADERS, timeout=10)
    response.raise_for_status()  # will throw if request fails
    return response.json()
def main():
    """Download orders 1..2000 and append each as a row in OUTPUT_CSV.

    The CSV header is derived from the keys of the first successfully
    fetched order; later rows are written against those same columns.
    """
    with open(OUTPUT_CSV, mode="w", newline="", encoding="utf-8") as file:
        writer = None
        for order_id in range(1, 2001):
            try:
                data = fetch_order(order_id)

                # Lazily build the writer once we know the column names.
                if writer is None:
                    writer = csv.DictWriter(file, fieldnames=data.keys())
                    writer.writeheader()

                writer.writerow(data)
                print(f"Saved order {order_id}")
            except requests.exceptions.HTTPError as err:
                print(f"HTTP error for order {order_id}: {err}")
            except Exception as err:
                print(f"Error for order {order_id}: {err}")

            # Respect rate limit
            time.sleep(RATE_LIMIT_SECONDS)

    print("Done!")


if __name__ == "__main__":
    main()

50
main.py
View File

@@ -1,28 +1,34 @@
import logging import logging
import sys import sys
import uvicorn
from fastapi import FastAPI from fastapi import FastAPI
from pyrate_limiter import Rate, Limiter, Duration from pyrate_limiter import Rate, Limiter, Duration
from service import auth_service from service import auth_service
from service.order_service import OrderService from service.tray_order_service import TrayOrderService
from service.product_service import ProductService from service.product_service import ProductService
from db.helpers import drop_tables, create_tables
from service.webhook_service import process_webhook
app = FastAPI() app = FastAPI()
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)
# db_helpers.drop_tables()
# db_helpers.create_tables()
@app.get("/get-tray") @app.get("/get-tray")
def read_root(): def read_root():
logging.basicConfig(stream=sys.stdout, filemode='a', level=logging.DEBUG)
# db_helpers.drop_tables()
# db_helpers.create_tables()
access_token = auth_service.refresh_access_token() access_token = auth_service.refresh_access_token()
rate = Rate(100, Duration.SECOND * 60) rate = Rate(100, Duration.SECOND * 60)
limiter = Limiter(rate, max_delay=50000) limiter = Limiter(rate, max_delay=50000)
order_service = TrayOrderService(access_token, limiter)
order_service = OrderService(access_token, limiter)
product_service = ProductService(access_token, limiter) product_service = ProductService(access_token, limiter)
categories = product_service.save_all_categories() categories = product_service.save_all_categories()
@@ -30,3 +36,31 @@ def read_root():
orders = order_service.save_all_orders() orders = order_service.save_all_orders()
products_sold = product_service.save_all_products_sold() products_sold = product_service.save_all_products_sold()
return 200 return 200
@app.get("/health")
def health():
return {"status": "ok"}
@app.post("/handle-webhook")
def handle_webhook(body: dict):
logger.info(body)
return process_webhook(body=body)
@app.post("/drop_tables")
def drop():
drop_tables()
return 200
@app.post("/create_tables")
def create():
create_tables()
return 200
@app.get("/get_auth_token")
def get_auth_token():
return auth_service.get_auth_token()
if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000)

981
orders.csv Normal file

File diff suppressed because one or more lines are too long

983
orders.json Normal file

File diff suppressed because one or more lines are too long

View File

@@ -2,15 +2,81 @@ import logging as log
from typing import Type from typing import Type
import requests import requests
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from sqlalchemy import select, desc from sqlalchemy import select, desc, delete
import constants import constants
from db.helpers import get_engine from db.helpers import get_engine
from db.models import ApiToken from db.models import ApiToken
from constants import * from constants import *
import logging
import datetime
logger = logging.getLogger(__name__)
def get_auth_token() -> str:
    """Return a usable Tray access token, refreshing or generating as needed.

    Strategy: purge stale rows, then prefer an already-active token,
    otherwise refresh an expired one, otherwise generate a brand-new token.
    """
    delete_old_tokens()
    all_tokens: list[ApiToken] = get_all_tokens()
    if len(all_tokens) == 0:
        return generate_token().access_token

    # Partition stored tokens into still-active vs. needing a refresh.
    active_tokens: list[ApiToken] = []
    tokens_to_refresh: list[ApiToken] = []
    for token in all_tokens:
        if token.date_expiration_access_token > datetime.datetime.now():
            active_tokens.append(token)
        else:
            tokens_to_refresh.append(token)

    with Session(get_engine()) as session:
        if len(active_tokens) > 0:
            logger.info("There are active tokens. Returning first and deleting the rest")
            for idx, active_token in enumerate(active_tokens):
                if idx > 0:
                    logger.info(f"More than 1 active token. Deleting auth token.")
                    session.delete(active_token)
            # Fix: commit before returning — the original returned first,
            # so duplicate active-token deletions were never persisted.
            session.commit()
            return active_tokens[0].access_token

        for idx, ref_token in enumerate(tokens_to_refresh):
            if idx > 0:
                logger.info(f"More than 1 token to refresh. Deleting token expired in {ref_token.date_expiration_refresh_token}")
                session.delete(ref_token)
        session.commit()
    return refresh_token(tokens_to_refresh[0]).access_token
def delete_old_tokens():
    """Delete tokens whose refresh token expired more than a day ago."""
    with Session(get_engine()) as session:
        logger.info("Deleting old tokens.")
        # Fix: the original built the DELETE statement but never executed it,
        # and the comparison matched still-valid tokens ('>' instead of '<').
        stmt = delete(ApiToken).where(
            ApiToken.date_expiration_refresh_token < (datetime.datetime.now() - datetime.timedelta(days=1))
        )
        session.execute(stmt)
        session.commit()
def refresh_token(token: ApiToken) -> ApiToken:
    """Exchange an expired token's refresh_token for a new ApiToken.

    Persists the refreshed token, deletes the stale row, and returns the
    newly persisted ApiToken.
    """
    response = requests.get(f"{constants.api_url}/auth?refresh_token={token.refresh_token}")
    refreshed_token = persist_auth_token(response.json())
    with Session(get_engine()) as session:
        session.delete(token)
        session.commit()
    # Fix: return the refreshed ApiToken — the original returned the
    # function object `refresh_token` itself.
    return refreshed_token
def get_all_tokens():
    """Return every ApiToken row currently stored in the database."""
    engine = get_engine()
    with Session(engine) as db_session:
        tokens = db_session.query(ApiToken).all()
        return tokens
def generate_token() -> ApiToken:
body = { body = {
"consumer_key": api_consumer_key, "consumer_key": api_consumer_key,
"consumer_secret": api_consumer_secret, "consumer_secret": api_consumer_secret,
@@ -21,30 +87,9 @@ def generate_token(persist=False):
if response.status_code in [201, 200]: if response.status_code in [201, 200]:
log.info('Generated tray access token: %s', response.json()) log.info('Generated tray access token: %s', response.json())
if persist: return persist_auth_token(response.json())
log.info('Persisting token')
persist_auth_token(response.json())
return response def persist_auth_token(api_token) -> ApiToken:
def refresh_access_token():
generate_token(persist=True)
token = get_last_api_token()
log.info('Refreshing token')
response = requests.get(f"{api_url}/auth?refresh_token={token.refresh_token}").json()
try:
persist_auth_token(response)
return response['access_token']
except Exception as e:
log.error(e)
return None
def persist_auth_token(api_token):
with Session(get_engine()) as session: with Session(get_engine()) as session:
token = ApiToken(message=api_token["message"], token = ApiToken(message=api_token["message"],
code=api_token["code"], code=api_token["code"],
@@ -59,6 +104,8 @@ def persist_auth_token(api_token):
session.add(token) session.add(token)
session.commit() session.commit()
return token
def delete_token(token): def delete_token(token):
with Session(get_engine()) as session: with Session(get_engine()) as session:

View File

View File

@@ -1,109 +0,0 @@
from datetime import datetime
from sqlalchemy.orm import sessionmaker
from db.helpers import get_engine
from db.models import Order, OrderStatus, OrderProductsSold # , ProductsSold
from service.helper import parse_date, get_paged_data
class OrderService:
    """Fetches orders from the Tray API (paged) and persists them via SQLAlchemy."""

    # Class-level defaults; all three are set per-instance in __init__.
    session = None
    access_token = None
    limiter = None

    def __init__(self, access_token, limiter):
        # access_token: Tray API token appended to every request.
        # limiter: shared rate limiter passed to get_paged_data.
        self.access_token = access_token
        Session = sessionmaker(bind=get_engine())
        self.session = Session()
        self.limiter = limiter

    def save_all_orders(self):
        """Download every order page from the API and persist all of them."""
        params = {
            'access_token': self.access_token
        }
        # max_pages=None means iterate until the API runs out of pages.
        json_orders = get_paged_data('orders', 'Orders', self.limiter, max_pages=None, params=params)
        self.save_orders(json_orders)

    def parse_order(self, json_order):
        """Map one raw API order dict onto an Order model.

        NOTE(review): every field is assumed present; a missing key raises
        KeyError and None values break the float()/int() casts — confirm the
        API guarantees all fields.
        """
        return Order(
            id=int(json_order['id']),
            status=json_order['status'],
            date=parse_date(json_order['date']),
            customer_id=int(json_order['customer_id']),
            partial_total=float(json_order['partial_total']),
            taxes=float(json_order['taxes']),
            discount=float(json_order['discount']),
            point_sale=json_order['point_sale'],
            shipment=json_order['shipment'],
            shipment_value=float(json_order['shipment_value']),
            shipment_date=parse_date(json_order['shipment_date']),
            store_note=json_order['store_note'],
            discount_coupon=json_order['discount_coupon'],
            payment_method_rate=float(json_order['payment_method_rate']),
            value_1=float(json_order['value_1']),
            payment_form=json_order['payment_form'],
            sending_code=json_order['sending_code'],
            session_id=json_order['session_id'],
            total=float(json_order['total']),
            payment_date=parse_date(json_order['payment_date']),
            access_code=json_order['access_code'],
            progressive_discount=float(json_order['progressive_discount']),
            shipping_progressive_discount=float(json_order['shipping_progressive_discount']),
            shipment_integrator=json_order['shipment_integrator'],
            modified=datetime.strptime(json_order['modified'], '%Y-%m-%d %H:%M:%S'),
            printed=bool(json_order['printed']),
            interest=float(json_order['interest']),
            cart_additional_values_discount=float(json_order['cart_additional_values_discount']),
            cart_additional_values_increase=float(json_order['cart_additional_values_increase']),
            id_quotation=json_order['id_quotation'],
            estimated_delivery_date=parse_date(json_order['estimated_delivery_date']),
            external_code=json_order['external_code'],
            tracking_url=json_order['tracking_url'],
            # API sends "0"/"1" strings: int() then bool() converts them.
            has_payment=bool(int(json_order['has_payment'])),
            has_shipment=bool(int(json_order['has_shipment'])),
            has_invoice=bool(int(json_order['has_invoice'])),
            total_comission_user=float(json_order['total_comission_user']),
            total_comission=float(json_order['total_comission']),
            is_traceable=bool(json_order['is_traceable']),
            order_status_id=int(json_order['OrderStatus']['id'])
        )

    def parse_order_status(self, json_order_status):
        """Map a raw API OrderStatus dict onto an OrderStatus model."""
        return OrderStatus(
            id=int(json_order_status['id']),
            default=bool(int(json_order_status['default'])),
            type=json_order_status['type'],
            show_backoffice=bool(int(json_order_status['show_backoffice'])),
            allow_edit_order=bool(int(json_order_status['allow_edit_order'])),
            status=json_order_status['status'],
            description=json_order_status['description'],
            background=json_order_status['background']
        )

    def parse_order_products_sold(self, json_order_ps, order_id):
        """Build the order <-> products-sold association row."""
        return OrderProductsSold(
            order_id=order_id,
            products_sold_id=json_order_ps['id']
        )

    def save_orders(self, orders):
        """Upsert (merge) each order plus its status and products-sold links.

        Commits once at the end, so a failure mid-loop persists nothing.
        """
        for json_order in orders:
            json_order = json_order['Order']
            order = self.parse_order(json_order)
            if 'OrderStatus' in json_order:
                json_order_status = json_order['OrderStatus']
                order_status = self.parse_order_status(json_order_status)
                self.session.merge(order_status)
            if 'ProductsSold' in json_order:
                for json_ps in json_order['ProductsSold']:
                    order_products_sold = self.parse_order_products_sold(json_ps, order.id)
                    self.session.merge(order_products_sold)
            self.session.merge(order)
        self.session.commit()

View File

@@ -0,0 +1,122 @@
from datetime import datetime
from sqlalchemy.orm import sessionmaker
from db.helpers import get_engine
from db.models import Order, OrderStatus, OrderProductsSold # , ProductsSold
from service.helper import parse_date, get_paged_data
from constants import api_url
import logging
import requests
from service.auth_service import get_auth_token
from pyrate_limiter import Rate, Limiter, Duration
class TrayOrderService:
    """Fetches orders from the Tray API and persists them via SQLAlchemy."""

    access_token = None
    limiter = None
    session = None
    logger = logging.getLogger(__name__)

    def __init__(self):
        # 100 requests per 60 s, blocking up to 50 s when throttled.
        self.limiter = Limiter(Rate(100, Duration.SECOND * 60), max_delay=50000)
        self.access_token = get_auth_token()
        # Fix: save_orders() uses self.session, which the original never
        # created (AttributeError on first save).
        self.session = sessionmaker(bind=get_engine())()

    def get_complete_order(self, order_id: int):
        """Fetch the "complete" representation of one order and parse it."""
        url = f"{api_url}/orders/{order_id}/complete?access_token={self.access_token}"
        response = requests.get(url)
        return self.parse_complete_order(response.json()['Order'])

    def save_all_orders(self):
        """Download every order page from the API and persist all of them."""
        params = {
            'access_token': self.access_token
        }
        json_orders = get_paged_data('orders', 'Orders', self.limiter, max_pages=None, params=params)
        self.save_orders(json_orders)

    def parse_complete_order(self, json):
        """Parse a "complete order" payload into an Order model.

        Fix: the original read json.get('Order') into a local and returned
        nothing. Accepts either the bare order dict or one still wrapped in
        an 'Order' key, then delegates to parse_order.
        """
        order_json = json.get('Order', json)
        return self.parse_order(order_json)

    def parse_order(self, json_order):
        """Map one raw API order dict onto an Order model.

        Uses .get() lookups, so missing keys surface as None (and then fail
        the float()/int() casts) rather than raising KeyError directly.
        """
        return Order(
            id=int(json_order.get('id')),
            status=json_order.get('status'),
            date=parse_date(json_order.get('date')),
            customer_id=int(json_order.get('customer_id')),
            partial_total=float(json_order.get('partial_total')),
            taxes=float(json_order.get('taxes')),
            discount=float(json_order.get('discount')),
            point_sale=json_order.get('point_sale'),
            shipment=json_order.get('shipment'),
            shipment_value=float(json_order.get('shipment_value')),
            shipment_date=parse_date(json_order.get('shipment_date')),
            store_note=json_order.get('store_note'),
            discount_coupon=json_order.get('discount_coupon'),
            payment_method_rate=float(json_order.get('payment_method_rate')),
            value_1=float(json_order.get('value_1')),
            payment_form=json_order.get('payment_form'),
            sending_code=json_order.get('sending_code'),
            session_id=json_order.get('session_id'),
            total=float(json_order.get('total')),
            payment_date=parse_date(json_order.get('payment_date')),
            access_code=json_order.get('access_code'),
            #progressive_discount=float(json_order.get('progressive_discount')),
            #shipping_progressive_discount=float(json_order.get('shipping_progressive_discount')),
            shipment_integrator=json_order.get('shipment_integrator'),
            modified=datetime.strptime(json_order.get('modified'), '%Y-%m-%d %H:%M:%S'),
            printed=bool(json_order.get('printed')),
            #interest=float(json_order.get('interest')),
            #cart_additional_values_discount=float(json_order.get('cart_additional_values_discount')),
            #cart_additional_values_increase=float(json_order.get('cart_additional_values_increase')),
            id_quotation=json_order.get('id_quotation'),
            estimated_delivery_date=parse_date(json_order.get('estimated_delivery_date')),
            external_code=json_order.get('external_code'),
            tracking_url=json_order.get('tracking_url'),
            # API sends "0"/"1" strings: int() then bool() converts them.
            has_payment=bool(int(json_order.get('has_payment'))),
            has_shipment=bool(int(json_order.get('has_shipment'))),
            has_invoice=bool(int(json_order.get('has_invoice'))),
            #total_comission_user=float(json_order.get('total_comission_user')),
            #total_comission=float(json_order.get('total_comission')),
            is_traceable=bool(json_order.get('is_traceable')),
            #order_status_id=int(json_order.get('OrderStatus')['id'])
        )

    def parse_order_status(self, json_order_status):
        """Map a raw API OrderStatus dict onto an OrderStatus model."""
        return OrderStatus(
            id=int(json_order_status['id']),
            default=bool(int(json_order_status['default'])),
            type=json_order_status['type'],
            show_backoffice=bool(int(json_order_status['show_backoffice'])),
            allow_edit_order=bool(int(json_order_status['allow_edit_order'])),
            status=json_order_status['status'],
            description=json_order_status['description'],
            background=json_order_status['background']
        )

    def parse_order_products_sold(self, json_order_ps, order_id):
        """Build the order <-> products-sold association row."""
        return OrderProductsSold(
            order_id=order_id,
            products_sold_id=json_order_ps['id']
        )

    def save_orders(self, orders):
        """Upsert (merge) each order plus its status and products-sold links.

        Commits once at the end, so a failure mid-loop persists nothing.
        """
        for json_order in orders:
            json_order = json_order['Order']
            order = self.parse_order(json_order)
            if 'OrderStatus' in json_order:
                json_order_status = json_order['OrderStatus']
                order_status = self.parse_order_status(json_order_status)
                self.session.merge(order_status)
            if 'ProductsSold' in json_order:
                for json_ps in json_order['ProductsSold']:
                    order_products_sold = self.parse_order_products_sold(json_ps, order.id)
                    self.session.merge(order_products_sold)
            self.session.merge(order)
        self.session.commit()

View File

@@ -0,0 +1,37 @@
from db.helpers import get_engine
from sqlalchemy.orm import Session
from db.models import TrayWebhook
import json
import logging
from service.tray_order_service import TrayOrderService
logger = logging.getLogger(__name__)
def process_webhook(body: dict):
    """Persist an incoming Tray webhook and act on order insert/update events.

    Returns the parsed order for handled events, otherwise 400.
    """
    save_webhook(body)

    # Fix: use .get() so a malformed payload follows the 400 path (with a
    # log line) instead of raising an unhandled KeyError.
    act = body.get('act')
    order_id = body.get('scope_id')
    seller_id = body.get('seller_id')

    # Only process webhooks for this specific store.
    if seller_id != "1018776":
        logger.info(f"seller_id: {seller_id}. Exiting.")
        return 400

    order_service = TrayOrderService()
    if act == 'insert' or act == 'update':
        order = order_service.get_complete_order(order_id)
        logger.info(order)
        return order

    return 400
def save_webhook(body: dict):
    # Store the raw webhook payload (JSON-dumped) for auditing/debugging.
    with Session(get_engine()) as session:
        webhook = TrayWebhook(json.dumps(body))
        session.add(webhook)
        session.commit()