Commit

Merge branch 'release-0.0.0a3'

JavierLuna committed Sep 9, 2018
2 parents d2b2728 + 9f86275 commit cc81125
Showing 13 changed files with 602 additions and 74 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -0,0 +1,7 @@
# Changelog

#### v0.0.0a3

* Add only() method to queries. This adds support for [projections](https://cloud.google.com/datastore/docs/concepts/queries#projections).
* Add sync() to DSEntity. It fetches the entity's data from Datastore and merges it into the local entity. It comes in handy when a projection is used in a query and you don't want to wipe the other properties of the entity on save.
* Add force_sync parameter to the save() method. It forces a sync() before saving to Datastore; a "safe save", if you will (a usage sketch follows below).
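
A hedged usage sketch of these three additions, pieced together from the code in this commit; the project id, the `Book` kind, and the `title` property are placeholders, and the query is built directly with the `QueryBuilder` from `datastorm/objects.py` since no other accessor is visible in this diff:

```python
from datastorm.datastorm import DataStorm
from datastorm.objects import QueryBuilder

# Placeholder project id; assumes Datastore credentials or the emulator are set up.
ds = DataStorm(project="my-project")

class Book(ds.DSEntity):
    __kind__ = "Book"

# only() turns the query into a projection: Datastore returns just "title".
for book in QueryBuilder(Book).only("title").all():
    # The projected entity holds only "title"; sync() merges the remaining
    # stored properties back in, so the following save() does not drop them.
    book.sync()
    book.save()

# Or do both in one step: force_sync=True runs sync() right before the write.
book = QueryBuilder(Book).first()
if book is not None:
    book.save(force_sync=True)
```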
20 changes: 20 additions & 0 deletions Makefile
@@ -0,0 +1,20 @@
.PHONY: test

test:
python -m unittest discover tests/

coverage:
PYTHONPATH=. coverage run --source datastorm setup.py test
coverage html
coverage report -m

clean:
rm -rf build
rm -rf datastorm.egg-info
rm -rf dist
rm -rf htmlcov
rm .coverage

upload:
python setup.py bdist_wheel
twine upload dist/*
3 changes: 2 additions & 1 deletion Pipfile
@@ -5,11 +5,12 @@ name = "pypi"

[packages]
google-cloud-datastore = "*"
twine = "*"


[dev-packages]
twine = "*"
python-dotenv = "*"
coverage = "*"

[requires]
python_version = "3.6"
70 changes: 38 additions & 32 deletions Pipfile.lock

Some generated files are not rendered by default.

10 changes: 9 additions & 1 deletion README.md
@@ -61,8 +61,16 @@ datastorm.save_multi(entity_list)
pip install datastorm
```

## Test
To run the tests you'll need a [Datastore emulator](https://cloud.google.com/datastore/docs/tools/datastore-emulator).

The recommended command to start it is:
````gcloud beta emulators datastore start --consistency=1````

You can run the tests with ````make test````.

## Disclaimer

Proper tests and a decent documentation will roll in a few days.
Proper documentation will roll in a few days.

Fork from [OrbitalAds/dittostore](https://github.com/OrbitalAds/dittostore), which I also created.
25 changes: 17 additions & 8 deletions datastorm/datastorm.py
@@ -1,3 +1,4 @@
import os
from typing import List

from google.cloud import datastore
@@ -8,17 +9,25 @@

class DataStorm:

def __init__(self, project):
self.project = project
def __init__(self, project=None, namespace=None, credentials=None, _http=None, _use_grpc=None):
self.project = project or os.getenv("DATASTORE_PROJECT_ID", None)
self.credentials = credentials
self.namespace = namespace
self._http = _http
self._use_grpc = _use_grpc

@property
def client(self):
return datastore.Client(project=self.project, namespace=self.namespace, credentials=self.credentials,
_http=self._http, _use_grpc=self._use_grpc)

@property
def DSEntity(self):
return AbstractDSEntity("DSEntity", (BaseEntity,), {'__kind__': None, '__project__': self.project})
return AbstractDSEntity("DSEntity", (BaseEntity,), {'__kind__': None, '__datastorm_client__': self.client})

def save_multi(self, entities : List[BaseEntity], exclude_from_indexes=()):
client = datastore.Client(project=self.project)
[entity._save_offline(exclude_from_indexes=exclude_from_indexes) for entity in entities]
client.put_multi([entity.get_raw_entity() for entity in entities])
def save_multi(self, entities: List[BaseEntity]):
[entity._save_offline() for entity in entities]
self.client.put_multi([entity.get_raw_entity() for entity in entities])

def generate_key(self, kind: str, identifier: str, parent_key: Key = None):
return datastore.Client(project=self.project).key(kind, identifier, parent=parent_key)
return self.client.key(kind, identifier, parent=parent_key)
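
For orientation, a hedged sketch of how the reworked `DataStorm` constructor above might be used; the project id, namespace, and `Book` kind are placeholders:

```python
import os

from datastorm.datastorm import DataStorm

# When no project is passed, __init__ falls back to the DATASTORE_PROJECT_ID
# environment variable (placeholder value here).
os.environ.setdefault("DATASTORE_PROJECT_ID", "my-project")

ds = DataStorm(namespace="staging")  # project resolved from the env var

class Book(ds.DSEntity):
    __kind__ = "Book"

# Keys and writes go through the ds.client property, which carries the
# namespace and credentials, instead of an inline datastore.Client(project=...).
books = [Book(ds.generate_key("Book", "book-{}".format(i)), title="Title {}".format(i))
         for i in range(3)]
ds.save_multi(books)  # a single put_multi through ds.client
```

Note that `save_multi()` no longer takes `exclude_from_indexes` after this commit.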
76 changes: 46 additions & 30 deletions datastorm/objects.py
@@ -1,6 +1,6 @@
import datetime
import json
from typing import Union, Optional
from typing import Union, Optional, List

from google.cloud import datastore
from google.cloud.datastore import Key, Entity
@@ -27,7 +27,7 @@ def __ge__(self, other: Union[str, int, float, bool]):
return Query(self.field_name, ">=", other)

def __repr__(self):
return self.field_name
return self.field_name # pragma: no cover


class Query:
@@ -38,7 +38,7 @@ def __init__(self, item: str, op: str, value: Union[str, int, float, bool]):
self.value = value

def __repr__(self):
return "< Query object: {} {} {} >".format(self.item, self.op, self.value)
return "< Query object: {} {} {} >".format(self.item, self.op, self.value) # pragma: no cover


class QueryBuilder:
@@ -47,11 +47,13 @@ def __init__(self, entity_class):
self.__entity_class = entity_class
self.__project = entity_class.__project__
self.__kind = entity_class.__kind__
self.__client = entity_class.__datastorm_client__
self.__filters = entity_class.__base_filters__ or []
self.__projection = []
self.__order = []

def filter(self, filter: Query):
self.__filters.append(filter)
def filter(self, *filters: Query):
self.__filters += filters
return self

def order(self, field: Union[FilterField, str], inverted: bool = False):
@@ -60,44 +62,52 @@ def order(self, field: Union[FilterField, str], inverted: bool = False):
self.__order.append(field)
return self

def only(self, *args: List[str]):
self.__projection += args
return self

def get(self, identifier: Union[Key, str] = None, key: Key = None):
client = datastore.Client(project=self.__project)
key = key or client.key(self.__kind, identifier)
raw_entity = client.get(key)
key = key or self.__client.key(self.__kind, identifier)
raw_entity = self.__client.get(key)

return None if raw_entity is None else self.__entity_class(key, _raw_entity=raw_entity, **raw_entity)

def all(self, page_size: int = 500, parent_key: Union[Key, str] = None):
client = datastore.Client(project=self.__project)

if parent_key is not None and type(parent_key) is str:
parent_key = client.key(self.__kind, parent_key)
def all(self, page_size: int = 500, parent_key: Key = None):

query = client.query(kind=self.__kind, ancestor=parent_key)
query = self.__client.query(kind=self.__kind, ancestor=parent_key)
[query.add_filter(filter.item, filter.op, filter.value) for filter in self.__filters]

if self.__order:
query.order = self.__order

if self.__projection:
query.projection = self.__projection

cursor = None
while True:
last_yielded_entity = None
query_iter = query.fetch(start_cursor=cursor, limit=page_size)
for raw_entity in query_iter:
yield self.__entity_class(raw_entity.key, _raw_entity=raw_entity, **raw_entity)
last_yielded_entity = self.__entity_class(raw_entity.key, _raw_entity=raw_entity, **raw_entity)
yield last_yielded_entity
cursor = query_iter.next_page_token
if not cursor:
if not cursor or last_yielded_entity is None:
break

def first(self):
result = None
try:
result = next(self.all(page_size=1))
except TypeError:
result = None
except TypeError: # pragma: no cover
pass
except StopIteration: # pragma: no cover
pass

return result

def __repr__(self):
return "< QueryBuilder filters: {}, ordered by: {}>".format(self.__filters or "No filters",
self.__order or "No order")
self.__order or "No order") # pragma: no cover


class AbstractDSEntity(type):
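
A short, hedged sketch of how the query pieces above chain together; the `Book` kind and property names are placeholders, and `Query` objects are built directly since that class is defined earlier in this file:

```python
from datastorm.datastorm import DataStorm
from datastorm.objects import Query, QueryBuilder

ds = DataStorm(project="my-project")  # placeholder project id

class Book(ds.DSEntity):
    __kind__ = "Book"

# filter() now accepts any number of Query objects at once.
query = QueryBuilder(Book).filter(Query("lang", "=", "en"),
                                  Query("available", "=", True)).order("title")

# all() pages through results with the Datastore cursor (500 entities per page
# by default) and stops once the cursor runs out or a page yields nothing.
for book in query.all(page_size=100):
    print(book)

# first() returns the first match, or None when there is nothing to yield.
latest = QueryBuilder(Book).order("published", inverted=True).first()
```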
@@ -115,6 +125,7 @@ class BaseEntity:
__project__ = None
__base_filters__ = []
__exclude__ = []
__datastorm_client__ = None
__allowed_types__ = [str, int, float, bool, datetime.datetime, dict, list]

def __init__(self, key: Union[Key, str], _kind: Optional[str] = None, _project: Optional[str] = None,
@@ -127,47 +138,52 @@ def __init__(self, key: Union[Key, str], _kind: Optional[str] = None, _project: Optional[str] = None,
[setattr(self, name, self._autouncast(name, value)) for name, value in kwargs.items()]
self._save_offline()

def save(self, exclude_from_indexes: tuple = ()):
client = datastore.Client(project=self.__project__)
self._save_offline(exclude_from_indexes)
def save(self, force_sync=False):
self._save_offline()
if force_sync:
self.sync()
self.__datastorm_client__.put(self.__raw_entity)

client.put(self.__raw_entity)
def sync(self):
buffer = self.__raw_entity
self.__raw_entity = self.__datastorm_client__.get(self.key)
self.__raw_entity.update(buffer)

def _save_offline(self, exclude_from_indexes: tuple = ()):
self.__raw_entity = self.__raw_entity or datastore.Entity(key=self.key,
exclude_from_indexes=exclude_from_indexes)
fields_to_store = {attr for attr in dir(self) if
type(getattr(self, attr)) in self.__allowed_types__ and not attr.startswith("_")} - self.__default_excludes
type(getattr(self, attr)) in self.__allowed_types__ and not attr.startswith(
"_")} - self.__default_excludes
entity_dict = {attr: self._autocast(getattr(self, attr)) for attr in fields_to_store}
self.__raw_entity.update(entity_dict)

def _autocast(self, value):
if type(value) in [list, dict]:
try:
return json.dumps(value, sort_keys=True)
except:
except: # pragma: no cover
pass
return value

def _autouncast(self, property, value):
if hasattr(self, property) and type(getattr(self, property)) in [dict, list]:
try:
return json.loads(value)
except:
except: # pragma: no cover
pass
return value

def delete(self):
"""Delete the object from Datastore."""
client = datastore.Client(project=self.__project__)
client.delete(self.__raw_entity.key)
self.__datastorm_client__.delete(self.key)

@classmethod
def generate_key(cls, identifier: str, parent_key: Optional[Key] = None):
return datastore.Client(project=cls.__project__).key(cls.__kind__, identifier, parent=parent_key)
return cls.__datastorm_client__.key(cls.__kind__, identifier, parent=parent_key)

def get_raw_entity(self):
return self.__raw_entity

def __repr__(self):
return "< {name} >".format(name=self.__kind__)
return "< {name} >".format(name=self.__kind__) # pragma: no cover