Upgraded sqlservice to 2.0.x

parent 229ea6fb5b
commit 6c1eb903f7

8 changed files with 95 additions and 91 deletions
@@ -12,45 +12,11 @@ from sqlalchemy.orm import sessionmaker, scoped_session
 # from sqlalchemy.orm.exc import UnmappedClassError
 from pymssql import OperationalError
 
-from sqlservice import SQLClient, SQLQuery
+from sqlservice import Database
 
 logger = logging.getLogger("PyJeeves." + __name__)
 
 
-class BaseFilterQuery(SQLQuery):
-    def get(self, ident):
-        # Override get() so that the flag is always checked in the
-        # DB as opposed to pulling from the identity map. - this is optional.
-        return SQLQuery.get(self.populate_existing(), ident)
-
-    def __iter__(self):
-        return SQLQuery.__iter__(self.private())
-
-    def from_self(self, *ent):
-        # Override from_self() to automatically apply
-        # the criterion to. this works with count() and
-        # others.
-        return SQLQuery.from_self(self.private(), *ent)
-
-    def private(self):
-        # Fetch the model name and column list and apply model-specific base filters
-        mzero = self._mapper_zero()
-
-        if mzero:
-            # Sometimes a plain model class will be fetched instead of mzero
-            try:
-                model = mzero.class_
-                obj = mzero.class_
-            except Exception:
-                model = mzero.__class__
-                obj = mzero
-
-            if hasattr(model, '_base_filters'):
-                return self.enable_assertions(False).filter(model._base_filters(obj))
-
-        return self
-
-
 class DBConnector(object):
     """This class is used to control the SQLAlchemy integration"""
@@ -82,9 +48,9 @@ class DBConnector(object):
         return retval
 
     def execute(self, operation=""):
-        conn = self.raw_engine.raw_connection()
+        conn = self.raw_engine
 
-        with conn.cursor(as_dict=True) as cursor:
+        with conn.connection.cursor(as_dict=True) as cursor:
             try:
                 cursor.execute(operation)
                 results = cursor.fetchall()
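For reference, a minimal sketch of the new raw-query path, assuming (as the constructor change in the next hunk suggests) that raw_engine now holds the SQLAlchemy Connection returned by db.connect(); the URI and query are placeholders:

# Minimal sketch, not part of the commit. Connection.connection exposes the
# underlying pymssql DBAPI connection, whose cursor accepts as_dict=True so
# rows come back as dictionaries.
from sqlservice import Database

db = Database("mssql+pymssql://user:pw@host:1433/dbname?charset=utf8")  # placeholder URI
conn = db.connect()
with conn.connection.cursor(as_dict=True) as cursor:
    cursor.execute("SELECT TOP 10 ArtNr FROM ar")  # placeholder query
    rows = cursor.fetchall()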
@@ -100,14 +66,13 @@ class DBConnector(object):
 
         uri = 'mssql+pymssql://{user}:{pw}@{host}:{port}/{db}?charset=utf8'.format(
             **config.config['databases']['raw'])
-        sql_client_config = {'SQL_DATABASE_URI': uri}
-        db = SQLClient(sql_client_config, query_class=BaseFilterQuery)
+        db = Database(uri, echo=False)
 
-        return db, db.session, db.engine
+        return db, db.session(), db.connect()
 
     def set_model_class(self, model_class):
         self.raw_db.model_class = model_class
-        self.raw_db.update_models_registry()
 
     def meta_session(self):
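The sqlservice 2.x entry point drops the config-dict SQLClient in favour of Database(uri). A hedged sketch of the wiring, using only the calls visible in this commit (Database, session(), connect()); the credential values are placeholders:

# Hedged sketch of the connector wiring after the upgrade.
from sqlservice import Database

uri = 'mssql+pymssql://{user}:{pw}@{host}:{port}/{db}?charset=utf8'.format(
    user='jeeves', pw='secret', host='dbhost', port=1433, db='raw')  # placeholders

db = Database(uri, echo=False)
session = db.session()     # ORM session, which this repo appears to expose as raw_session
connection = db.connect()  # engine-level connection, which it appears to expose as raw_engine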
@@ -11,7 +11,8 @@ from sqlalchemy.exc import OperationalError
 from sqlalchemy.schema import MetaData, Column
 from sqlalchemy.types import Integer
 from sqlalchemy.orm.collections import InstrumentedList
-from sqlalchemy import event
+from sqlalchemy import event, orm
+from sqlalchemy.orm import Session
 
 from sqlservice import ModelBase, as_declarative
 
@@ -26,16 +27,51 @@ logger.info("Reading Jeeves DB structure")
 
 meta = MetaData()
 try:
+    # TODO: Split raw.py and reflect tables on separate module loads?
     meta.reflect(bind=db.raw_session.connection(),
                  only=['ar', 'ars', 'arsh', 'arean', 'xae', 'xare', 'fr', 'kus', 'x1k',
                        'oh', 'orp', 'lp', 'vg', 'xp', 'xm', 'prh', 'prl',
-                       'kp', 'kpw', 'cr', 'X4', 'xw', 'X1',
+                       'kp', 'kpw', 'cr', 'X4', 'xw', 'X1', 'jfbs', 'lrfb',
                        'JAPP_EWMS_Item_Replenishment_Levels'])
 except OperationalError as e:
     logger.error("Failed to read Jeeves DB structure")
     raise e
 
 
+@event.listens_for(Session, "do_orm_execute")
+def _add_filtering_criteria(execute_state):
+    """Intercept all ORM queries. Add a with_loader_criteria option to all
+    of them.
+
+    This option applies to SELECT queries and adds a global WHERE criteria
+    (or as appropriate ON CLAUSE criteria for join targets)
+    to all objects of a certain class or superclass.
+    """
+
+    # the with_loader_criteria automatically applies itself to
+    # relationship loads as well including lazy loads. So if this is
+    # a relationship load, assume the option was set up from the top level
+    # query.
+
+    # TODO: Make configurable if repo made pub
+    company_code = execute_state.execution_options.get("company_code", 1)
+
+    if (
+        not execute_state.is_column_load
+        and not execute_state.is_relationship_load
+        # and not execute_state.execution_options.get("include_private", False)
+    ):
+        execute_state.statement = execute_state.statement.options(
+            orm.with_loader_criteria(
+                RawBaseModel,
+                lambda cls: cls.ForetagKod == company_code,
+                include_aliases=True,
+            )
+        )
+
+
 @as_declarative(metadata=meta)
 class RawBaseModel(ModelBase):
     """ Generalize __init__, __repr__ and to_json
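The do_orm_execute listener above replaces the old BaseFilterQuery: every ORM SELECT against RawBaseModel subclasses gets a ForetagKod criterion unless it is a column or relationship load. A hedged usage sketch (model and session names are from this repo; company_code is the execution option the listener reads):

# Hedged usage sketch, not part of the commit.
from sqlalchemy import select

# Default: the listener injects WHERE ForetagKod = 1 (and matching ON clause
# criteria for joined relationships) into the statement.
articles = db.raw_session.execute(select(Article)).scalars().all()

# Another company: the listener reads "company_code" from the execution options.
articles_c2 = db.raw_session.execute(
    select(Article).execution_options(company_code=2)
).scalars().all()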
@@ -62,19 +98,7 @@ class RawBaseModel(ModelBase):
     def __init__(self, data=None, **kargs):
         if data:
             data = self._map_keys(data)
-        self.update(data, **kargs)
-        # super(RawBaseModel, self).__init__(data=None, **kargs)
-
-    @classmethod
-    def _base_filters(self, obj, filters=and_()):
-        # This method provides base filtering, additional filtering can be done in subclasses
-        # Add this method to your model if you want more filtering, otherwise leave it out
-        # import and_ from sqlalchemy package
-        # this is a base filter for ALL queries
-        return and_(
-            obj.ForetagKod == 1,
-            filters
-        )
+        self.set(**kargs)
 
     @classmethod
     def _map_columns(cls, key):
@@ -224,14 +224,6 @@ class Article(RawBaseModel):
         except TypeError:
             logger.debug("NoneType error, %s" % self.ArtNr)
 
-    # @classmethod
-    # def _base_filters(self, obj):
-    #     return RawBaseModel._base_filters(
-    #         obj,
-    #         and_(obj.ItemStatusCode == 0)
-    #     )
-
 
 class ContactInformationType(RawBaseModel):
     __tablename__ = 'X4'
     __to_dict_only__ = ('ComKod', 'ComBeskr')
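The per-model _base_filters hook (and its commented-out Article variant above) has no direct replacement in this commit; if a model-specific filter such as ItemStatusCode == 0 were still wanted, one way to express it under the new scheme is a per-query with_loader_criteria option. A sketch of that idea, not part of the commit:

# Hedged sketch only: re-adding the old Article.ItemStatusCode == 0 filter per
# query with the same SQLAlchemy option the global listener uses.
from sqlalchemy import select, orm

stmt = select(Article).options(
    orm.with_loader_criteria(Article, lambda cls: cls.ItemStatusCode == 0)
)
active_articles = db.raw_session.execute(stmt).scalars().all()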
@@ -572,3 +564,24 @@ class ItemReplenishmentLevels(RawBaseModel):
     # "Table 'JAPP_EWMS_Item_Replenishment_Levels' does not have the identity property.
     # Cannot perform SET operation."
     ForetagKod = Column(Integer, primary_key=True, autoincrement=False)
+
+
+class SupplierInvoicePayment(RawBaseModel):
+    __tablename__ = 'lrfb'
+    # __column_map__ = {'AltEnhetKod': 'UnitCode', 'AltEnhetBeskr': 'UnitName',
+    #                   'AltEnhetOmrFaktor': 'DefaultUnitConv'}
+    # __to_dict_only__ = ('AltEnhetBeskr', 'AltEnhetOmrFaktor')
+
+
+class SupplierInvoiceJournal(RawBaseModel):
+    __tablename__ = 'jfbs'
+    # __column_map__ = {'ArtNr': 'ArticleNumber',
+    #                   'AltEnhetKod': 'UnitCode', 'AltEnhetOmrFaktor': 'UnitConv',
+    #                   'AltEnhetOrderStd': 'DefaultSalesUnit'}
+    # __to_dict_only__ = ('AltEnhetKod', 'AltEnhetOmrFaktor',
+    #                     'AltEnhetOrderStd', 'ArticleAlternativeUnit')
+
+    # ArtNr = Column(String, ForeignKey('ar.ArtNr'), primary_key=True)
+
+    # AltEnhetKod = Column(String, ForeignKey('xae.AltEnhetKod'), primary_key=True)
+    # ArticleAlternativeUnit = relationship(ArticleAlternativeUnit, lazy='joined')
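The two new models map the 'jfbs' and 'lrfb' tables added to the meta.reflect() call earlier in this commit, so their columns come from reflection rather than explicit Column definitions. A hedged query sketch:

# Hedged sketch: reflected models are queried like any other mapped class,
# and the global ForetagKod listener applies to them as RawBaseModel subclasses.
journals = db.raw_session.query(SupplierInvoiceJournal).limit(10).all()
payments = db.raw_session.query(SupplierInvoicePayment).limit(10).all()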
@@ -82,11 +82,11 @@ class Article():
                 logger.info('Creating GTIN for %s, %s, %s' % (n1.ArtNr, n1.AltEnhetKod, n1.ArtNrEAN))
                 continue
 
-            db.raw_db.add(n1)
+            db.raw_session.add(n1)
             logger.debug('Created/updated Article EAN for %s - %s with GTIN %s' % (
                 gtin['article_no'], gtin.get('unit', 'no unit'), gtin['article_gtin']))
 
-        db.raw_db.commit()
+        db.raw_session.commit()
         logger.info('Succesfully commited %s GTINs to database' % (len(gtins)))
 
     @staticmethod
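This hunk shows the pattern repeated through the rest of the commit: the 1.x SQLClient convenience calls (db.raw_db.add/merge/delete/commit) become plain SQLAlchemy Session calls on db.raw_session. A hedged sketch with illustrative values:

# Hedged sketch of the migration pattern; column values are illustrative.
n1 = ArticleEAN(ArtNr='12345', AltEnhetKod='st', ArtNrEAN='7312345678906')

db.raw_session.add(n1)      # was: db.raw_db.add(n1)
db.raw_session.commit()     # was: db.raw_db.commit()

db.raw_session.delete(n1)   # was: db.raw_db.delete(n1)
db.raw_session.commit()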
@@ -94,9 +94,9 @@ class Article():
         gtins = db.raw_session.query(ArticleEAN).all()
 
         for gtin in gtins:
-            db.raw_db.delete(gtin)
+            db.raw_session.delete(gtin)
 
-        db.raw_db.commit()
+        db.raw_session.commit()
         logger.info('Deleted %s GTINs' % (len(gtins)))
 
@@ -126,6 +126,7 @@ def get_gtin_for_article(article_ean, article_unit=None, use_prefix=True):
         'Kart': 1,
         'Bricka': 1,
         '½-pall': 2,
+        '½-pall A': 2,
         'tray_no_wrap': 8
     }
     prefixes = []
@@ -273,7 +274,7 @@ def create_gtins(dry_run=True):
         for unit in article.ArticleUnit:
 
             # Skip paket for 21%, should only match HV with plastic wrapping.
-            if article.ArtNr[0:2] == '21' and unit.AltEnhetKod[0:6].lower() == 'paket':
+            if article.ArtNr[0:2] == '21' and unit.AltEnhetKod[0:5].lower() == 'paket':
                 logger.info('Skip paket unit for %s' % (article.ArtNr))
                 continue
 
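The [0:6] to [0:5] change fixes an off-by-one: 'paket' is five characters, so the six-character slice keeps a trailing character and can never equal 'paket' for longer unit codes. A small illustration with a hypothetical unit code:

# Hypothetical unit code, for illustration only.
code = 'Paket 12'
code[0:6].lower() == 'paket'   # False: 'paket ' keeps the trailing space
code[0:5].lower() == 'paket'   # True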
@@ -354,7 +355,7 @@ def set_storage_type():
     for article in articles:
         article.LagTyp = 4
 
-    db.raw_db.commit()
+    db.raw_session.commit()
 
     logger.info("Updated storage type for %s articles" % (len(articles)))
 
@@ -386,7 +387,7 @@ def set_zone_placement():
         else:
             logger.info("Excluded %s, wrong article class or no balance " % (article.ArtNr))
 
-    db.raw_db.commit()
+    db.raw_session.commit()
 
     logger.info("Updated placement zone for %s articles" % (zone_placements_update))
 
@@ -409,7 +410,7 @@ def update_decimals_on_alt_units():
 
             updated_units += 1
 
-    db.raw_db.commit()
+    db.raw_session.commit()
     logger.info("Updated decimal count for %s article units" % (updated_units))
 
 
@@ -454,6 +455,7 @@ if __name__ == '__main__':
     # Article.clear_article_gtins()
     logger.info("Creating new GTINs from base GTIN")
     create_gtins(dry_run=False)
 
     # logger.info("Creating new GTINs from trading goods CSV")
     # create_gtins_for_trading_goods()
@@ -97,7 +97,7 @@ def update_customer_delivery_from_csv(filename='zip_codes_svhl.csv'):
     logger.info('Amount updated %d' % customers_to_update)
 
     # db.raw_db.merge(n1)
-    db.raw_db.commit()
+    db.raw_session.commit()
     logger.info('Succesfully commited updated customers to database')
 
 
@@ -36,9 +36,9 @@ class Warehouse():
             ArticleShelf.LagSaldo == 0)).all()
 
         for shelf in shelfs:
-            db.raw_db.delete(shelf)
+            db.raw_session.delete(shelf)
 
-        db.raw_db.commit()
+        db.raw_session.commit()
         logger.info('Deleted %s shelfs' % (len(shelfs)))
 
     @staticmethod
@@ -46,9 +46,9 @@ class Warehouse():
         replenish = db.raw_session.query(ItemReplenishmentLevels).all()
 
        for repl in replenish:
-            db.raw_db.delete(repl)
+            db.raw_session.delete(repl)
 
-        db.raw_db.commit()
+        db.raw_session.commit()
         logger.info('Deleted old replenishment levels')
 
     @staticmethod
@@ -78,8 +78,8 @@ class Warehouse():
                 JAPP_EWMS_multipel=unit_multiple,
                 ForetagKod=1)
 
-            db.raw_db.add(new_level)
-            db.raw_db.commit()
+            db.raw_session.add(new_level)
+            db.raw_session.commit()
 
 
 # TODO: Should be moved to separate project with Lindvalls specific code
@@ -107,8 +107,8 @@ def update_shelfs_from_csv(filename='shelf_numbers_20211217.csv'):
             MultiItemsOnBin=multiitems, JAPP_EWMS_AllowMultipleBatchesOnBin=multiitems,
             ForetagKod=1)
 
-        db.raw_db.merge(n1)
-        db.raw_db.commit()
+        db.raw_session.merge(n1)
+        db.raw_session.commit()
         logger.info('Succesfully commited shelfs to database')
 
 
@@ -1,9 +1,9 @@
-nose==1.3.7
-Sphinx==3.2.1
-pymssql-py38==2.1.4
-SQLAlchemy==1.3.22
-sqlservice==1.2.1
-PyMySQL==0.10.0
-alembic==1.4.2
-PyYAML==5.3.1
-gtin==0.1.13
+nose>=1.3.7
+Sphinx>=3.2.1
+pymssql>=2.2.7
+SQLAlchemy>=1.3.22
+sqlservice>=2.0.0
+PyMySQL>=0.10.0
+alembic>=1.4.2
+PyYAML>=5.3.1
+gtin>=0.1.13
setup.py
@@ -3,7 +3,7 @@
 from setuptools import setup, find_packages
 
 
-with open('README.rst') as f:
+with open('README.md') as f:
     readme = f.read()
 
 with open('LICENSE') as f: