DepotManager.configure('default', {'depot.storage_path': './lfs'})
DepotManager.configure('another', {'depot.storage_path': './lfs'})
DepotManager.alias('another_alias', 'another')
DepotManager.make_middleware(None)
def teardown():
shutil.rmtree('./lfs', ignore_errors=True)
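The lines above only wire up the storages and the cleanup hook; below is a minimal usage sketch (assuming the filedepot DepotManager/FileStorage API) of storing and reading a file back through the 'default' depot configured above:
from depot.manager import DepotManager

depot = DepotManager.get()                        # the 'default' storage configured above
file_id = depot.create(b'hello world', filename='hello.txt',
                       content_type='text/plain')
stored = depot.get(file_id)                       # a StoredFile
assert stored.read() == b'hello world'
assert stored.filename == 'hello.txt'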
class Document(MappedClass):
class __mongometa__:
session = DBSession
name = 'depot_test_document'
_id = FieldProperty(s.ObjectId)
name = FieldProperty(str)
content = UploadedFileProperty()
photo = UploadedFileProperty(upload_type=UploadedImageWithThumb)
second_photo = UploadedFileProperty(filters=(WithThumbnailFilter((12, 12), 'PNG'),))
targeted_content = UploadedFileProperty(upload_storage='another_alias')
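A hypothetical sketch of saving attachments through the mapped class above (it assumes DBSession is the Ming session named in __mongometa__ and that the depots configured at the top of the page are available):
doc = Document(name='Foo')
doc.content = b'this is the file content'      # stored via the 'default' depot
doc.targeted_content = b'routed elsewhere'     # stored via the 'another_alias' depot
DBSession.flush()                              # persist the document
print(doc.content.file.read())                 # the UploadedFile exposes the stored data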
class TestMingAttachments(object):
def __init__(self):
self.file_content = b'this is the file content'
self.fake_file = tempfile.NamedTemporaryFile()
self.fake_file.write(self.file_content)
self.fake_file.flush()
def setup(self):
clear_database()
def setup_basic_test(config=None):
    '''Create clean environment for running tests.

    A lightweight alternative is setup_config_test which doesn't bootstrap app data.
    '''
try:
conf_dir = tg.config.here
except AttributeError:
conf_dir = os.getcwd()
ew.TemplateEngine.initialize({})
test_file = os.path.join(conf_dir, get_config_file(config))
cmd = SetupCommand('setup-app')
cmd.run([test_file])
# run all tasks, e.g. indexing from bootstrap operations
while M.MonQTask.run_ready('setup'):
ThreadLocalORMSession.flush_all()
setup_basic_test.__test__ = False # sometimes __test__ above isn't sufficient
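A hypothetical sketch of how a test class could call the helper above:
class TestWithBootstrappedApp(object):
    def setUp(self):
        setup_basic_test()    # clean environment + bootstrap data before each test

    def test_something(self):
        ...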
@LazyProperty
def server_name(self):
p1 = Popen(['hostname', '-s'], stdout=PIPE)
server_name = p1.communicate()[0].strip()
p1.wait()
return server_name
class Repository(Artifact):
class __mongometa__:
name='repository'
type_s = 'ForgeSCM Repository'
_id = FieldProperty(schema.ObjectId)
description = FieldProperty(str)
status = FieldProperty(str)
parent = FieldProperty(str)
type = FieldProperty(str, if_missing='hg')
pull_requests = FieldProperty([str])
repo_dir = FieldProperty(str)
cloned_from = FieldProperty(str)
forks = FieldProperty([dict(
project_id=schema.ObjectId,
app_config_id=schema.ObjectId(if_missing=None))])
forked_from = FieldProperty(dict(
project_id=schema.ObjectId,
app_config_id=schema.ObjectId(if_missing=None)))
commits = RelationProperty('Commit', via='repository_id',
fetch=False)
def ordered_commits(self, limit=None):
q = self.commits
q = q.sort([('rev', pymongo.DESCENDING),
('date', pymongo.DESCENDING)])
if limit:
q = q.limit(limit)
return q
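    # Hypothetical usage, assuming a loaded Repository instance `repo` (newest commits first):
    #     for ci in repo.ordered_commits(limit=10):
    #         print(ci.rev, ci.hash, ci.summary)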
def scmlib(self):
from .types import MarkdownCache
log = logging.getLogger(__name__)
class Discussion(Artifact, ActivityObject):
class __mongometa__:
name = 'discussion'
type_s = 'Discussion'
parent_id = FieldProperty(schema.Deprecated)
shortname = FieldProperty(str)
name = FieldProperty(str)
description = FieldProperty(str, if_missing='')
description_cache = FieldProperty(MarkdownCache)
num_topics = FieldProperty(int, if_missing=0)
num_posts = FieldProperty(int, if_missing=0)
subscriptions = FieldProperty({str: bool})
threads = RelationProperty('Thread', via='discussion_id')
posts = RelationProperty('Post', via='discussion_id')
def __json__(self, limit=None, posts_limit=None, is_export=False):
return dict(
_id=str(self._id),
shortname=self.shortname,
name=self.name,
description=self.description,
threads=[t.__json__(limit=posts_limit, is_export=is_export) for t
in self.thread_class().query.find(dict(discussion_id=self._id)).limit(limit or 0)]
)
def delete(self):
try:
if os.path.exists(self.repo_dir):
shutil.rmtree(self.repo_dir)
except Exception:
log.exception('Error deleting %s', self.repo_dir)
Artifact.delete(self)
mapper(Commit).remove(dict(app_config_id=self.app_config_id))
class Commit(Artifact):
class __mongometa__:
name='commit'
type_s = 'ForgeSCM Commit'
_id = FieldProperty(schema.ObjectId)
hash = FieldProperty(str)
rev = FieldProperty(int) # only relevant for hg and svn repos
repository_id = ForeignIdProperty(Repository)
summary = FieldProperty(str)
diff = FieldProperty(str)
date = FieldProperty(datetime)
parents = FieldProperty([str])
tags = FieldProperty([str])
user = FieldProperty(str)
branch = FieldProperty(str)
repository = RelationProperty(Repository, via='repository_id')
def index(self):
result = Artifact.index(self)
result.update(
content = load(pid, 'wiki', markdown_file)
if page == 'HomePage.json':
globals = WM.Globals.query.get(
app_config_id=wiki_app.config._id)
if globals is not None:
globals.root = page_data.title
else:
globals = WM.Globals(
app_config_id=wiki_app.config._id, root=page_data.title)
p = WM.Page.upsert(page_data.title)
p.text = wiki2markdown(content)
# upload attachments
upload_attachments(p, pid, beginning)
if not p.history().first():
p.commit()
ThreadLocalORMSession.flush_all()
_id = FieldProperty(S.ObjectId)
sfx_userid = FieldProperty(S.Deprecated)
username = FieldProperty(str)
email_addresses = FieldProperty([str])
password = FieldProperty(str)
last_password_updated = FieldProperty(datetime)
projects = FieldProperty(S.Deprecated)
# full mount point: prefs dict
tool_preferences = FieldProperty(S.Deprecated)
tool_data = FieldProperty({str: {str: None}}) # entry point: prefs dict
disabled = FieldProperty(bool, if_missing=False)
pending = FieldProperty(bool, if_missing=False)
# Don't use these directly, use get/set_pref() instead
preferences = FieldProperty(dict(
results_per_page=int,
email_address=str,
email_format=str,
disable_user_messages=bool,
mention_notifications=bool,
multifactor=bool,
))
# Additional top-level fields can/should be accessed with get/set_pref also
# Not sure why we didn't put them within the 'preferences' dictionary :(
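    # Hypothetical usage of the accessors mentioned above, assuming a loaded User `user`:
    #     user.set_pref('results_per_page', 50)
    #     fmt = user.get_pref('email_format')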
display_name = FieldPropertyDisplayName(str)
# Personal data
sex = FieldProperty(
S.OneOf('Male', 'Female', 'Other', 'Unknown',
if_missing='Unknown'))
birthdate = FieldProperty(S.DateTime, if_missing=None)
general = FieldProperty([dict(
    category=S.ObjectId,
messages=[dict(
messagetype=str,
created=int,
modified=int)],
tickets=dict(
solved=int,
assigned=int,
revoked=int,
totsolvingtime=int),
commits=[dict(
lines=int,
number=int,
language=S.ObjectId)])])
lastmonth = FieldProperty(dict(
messages=[dict(
datetime=datetime,
created=bool,
categories=[S.ObjectId],
messagetype=str)],
assignedtickets=[dict(
datetime=datetime,
categories=[S.ObjectId])],
revokedtickets=[dict(
datetime=datetime,
categories=[S.ObjectId])],
solvedtickets=[dict(
datetime=datetime,
categories=[S.ObjectId],
solvingtime=int)],
commits=[dict(
session = main_orm_session
name = 'mailbox'
unique_indexes = [
('user_id', 'project_id', 'app_config_id',
'artifact_index_id', 'topic', 'is_flash'),
]
indexes = [
('project_id', 'artifact_index_id'),
('is_flash', 'user_id'),
('type', 'next_scheduled'), # for q_digest
('type', 'queue_empty'), # for q_direct
# for deliver()
('project_id', 'app_config_id', 'artifact_index_id', 'topic'),
]
_id = FieldProperty(S.ObjectId)
user_id = AlluraUserProperty(if_missing=lambda: c.user._id)
project_id = ForeignIdProperty('Project', if_missing=lambda: c.project._id)
app_config_id = ForeignIdProperty(
'AppConfig', if_missing=lambda: c.app.config._id)
# Subscription filters
artifact_title = FieldProperty(str)
artifact_url = FieldProperty(str)
artifact_index_id = FieldProperty(str)
topic = FieldProperty(str)
# Subscription type
is_flash = FieldProperty(bool, if_missing=False)
type = FieldProperty(S.OneOf('direct', 'digest', 'summary', 'flash'))
frequency = FieldProperty(dict(
n=int, unit=S.OneOf('day', 'week', 'month')))
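Built only from the fields above, a hypothetical sketch of creating a weekly digest subscription; user_id, project_id and app_config_id fall back to the request context c through the if_missing lambdas:
mbox = Mailbox(type='digest', frequency=dict(n=1, unit='week'))
ThreadLocalORMSession.flush_all()    # same flush pattern used earlier on this page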