Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
"""
The config file for setting up a QCFractal Manager, all sub fields of this model are at equal top-level of the
YAML file. No additional top-level fields are permitted, but sub-fields may have their own additions.
Not all fields are required, and many will depend on the cluster you are running on and the adapter you choose
to run with.
"""
common: CommonManagerSettings = CommonManagerSettings()
server: FractalServerSettings = FractalServerSettings()
manager: QueueManagerSettings = QueueManagerSettings()
cluster: Optional[ClusterSettings] = ClusterSettings()
dask: Optional[DaskQueueSettings] = DaskQueueSettings()
parsl: Optional[ParslQueueSettings] = ParslQueueSettings()
class Config(ProtoModel.Config):
    # Reject unknown fields so a typo in the YAML config fails validation
    # immediately instead of being silently ignored.
    extra = "forbid"
def parse_args():
parser = argparse.ArgumentParser(
description="A CLI for a QCFractal QueueManager with a ProcessPoolExecutor, Dask, or Parsl backend. "
"The Dask and Parsl backends *requires* a config file due to the complexity of its setup. If a config "
"file is specified, the remaining options serve as CLI overwrites of the config."
)
parser.add_argument("--version", action="version", version=f"{qcfractal.__version__}")
parser.add_argument("--config-file", type=str, default=None)
# Common settings
common = parser.add_argument_group("Common Adapter Settings")
common.add_argument(
"""
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union
import pandas as pd
from ..models import ObjectId, OptimizationSpecification, ProtoModel, QCSpecification, TorsionDriveInput
from ..models.torsiondrive import TDKeywords
from ..visualization import custom_plot
from .collection import BaseProcedureDataset
from .collection_utils import register_collection
if TYPE_CHECKING: # pragma: no cover
from ..models import Molecule # lgtm[py/unused-import] (https://github.com/Semmle/ql/issues/2014)
class TDEntry(ProtoModel):
    """Data model for a single torsiondrive entry in a TorsionDriveDataset."""

    # Unique, human-readable identifier for this entry within the dataset.
    name: str
    # Server-side ids of the starting molecules for the torsiondrive.
    initial_molecules: Set[ObjectId]
    # Torsiondrive-specific run options (see TDKeywords).
    td_keywords: TDKeywords
    # Free-form metadata attached to this entry.
    attributes: Dict[str, Any]
    # Maps a specification name to the id of the resulting procedure record.
    # NOTE(review): mapping semantics inferred from usage elsewhere — confirm.
    object_map: Dict[str, ObjectId] = {}
class TDEntrySpecification(ProtoModel):
    """A named pair of optimization and QC specifications used to run the
    entries of a torsiondrive dataset."""

    # Identifier for this specification within the dataset.
    name: str
    # Optional free-text description of the specification.
    description: Optional[str]
    # Settings for the constrained optimizations performed at each grid point.
    optimization_spec: OptimizationSpecification
    # Quantum chemistry settings (method/basis/program) for the underlying computations.
    qc_spec: QCSpecification
It is up to the individual implementations of the Collection to do things with that data
"""
self.client = client
if (self.client is not None) and not (self.client.__class__.__name__ == "FractalClient"):
raise TypeError("Expected FractalClient as `client` kwarg, found {}.".format(type(self.client)))
if "collection" not in kwargs:
kwargs["collection"] = self.__class__.__name__.lower()
kwargs["name"] = name
# Create the data model
self.data = self.DataModel(**kwargs)
class DataModel(ProtoModel):
    """
    Internal data structure base model typed by pydantic.

    This structure validates input, allows server-side validation and data
    security, and will create the information to pass back and forth between
    server and client.

    Subclasses of Collection can extend this class internally to change the
    set of additional data defined by the Collection.
    """

    # NOTE(review): "local" appears to be a placeholder id used before the
    # collection is saved to a server — confirm against save logic.
    id: str = "local"
    # Name of this collection instance.
    name: str
    # Collection category (defaults elsewhere to the lowercased class name).
    collection: str
    # Provenance metadata as simple string key/value pairs.
    provenance: Dict[str, str] = {}
a top level `Config` class, an `Executor` sub-class, and a `Provider` sub-class of the `Executor`.
Config -> Executor -> Provider. Each of these have their own options, and extra values fed into the
ParslQueueSettings are fed to the `Config` level.
It requires both `executor` and `provider` settings, but will default fill them in and often does not need
any further configuration which is handled by other settings in the config file.
"""
executor: ParslExecutorSettings = ParslExecutorSettings()
provider: ParslProviderSettings = ParslProviderSettings()
class Config(SettingsCommonConfig):
    # Parsl takes many backend-specific options; extra keys are passed
    # through to the Parsl ``Config`` level rather than rejected.
    extra = "allow"
class ManagerSettings(ProtoModel):
    """
    The config file for setting up a QCFractal Manager; all sub fields of this model are at the equal top level of the
    YAML file. No additional top-level fields are permitted, but sub-fields may have their own additions.

    Not all fields are required, and many will depend on the cluster you are running on and the adapter you choose
    to run with.
    """

    # Adapter-independent manager settings.
    common: CommonManagerSettings = CommonManagerSettings()
    # Connection settings for the Fractal server this manager talks to.
    server: FractalServerSettings = FractalServerSettings()
    # Queue-manager behavior settings.
    manager: QueueManagerSettings = QueueManagerSettings()
    # Cluster/scheduler description (optional).
    cluster: Optional[ClusterSettings] = ClusterSettings()
    # Settings for the Dask adapter backend (optional).
    dask: Optional[DaskQueueSettings] = DaskQueueSettings()
    # Settings for the Parsl adapter backend (optional).
    parsl: Optional[ParslQueueSettings] = ParslQueueSettings()
class Config(ProtoModel.Config):
if TYPE_CHECKING: # pragma: no cover
from ..models import Molecule # lgtm[py/unused-import] (https://github.com/Semmle/ql/issues/2014)
class TDEntry(ProtoModel):
    """Data model for a single torsiondrive entry in a TorsionDriveDataset."""

    # Unique, human-readable identifier for this entry within the dataset.
    name: str
    # Server-side ids of the starting molecules for the torsiondrive.
    initial_molecules: Set[ObjectId]
    # Torsiondrive-specific run options (see TDKeywords).
    td_keywords: TDKeywords
    # Free-form metadata attached to this entry.
    attributes: Dict[str, Any]
    # Maps a specification name to the id of the resulting procedure record.
    # NOTE(review): mapping semantics inferred from usage elsewhere — confirm.
    object_map: Dict[str, ObjectId] = {}
class TDEntrySpecification(ProtoModel):
    """A named pair of optimization and QC specifications used to run the
    entries of a torsiondrive dataset."""

    # Identifier for this specification within the dataset.
    name: str
    # Optional free-text description of the specification.
    description: Optional[str]
    # Settings for the constrained optimizations performed at each grid point.
    optimization_spec: OptimizationSpecification
    # Quantum chemistry settings (method/basis/program) for the underlying computations.
    qc_spec: QCSpecification
class TorsionDriveDataset(BaseProcedureDataset):
    """A collection of torsiondrive procedures sharing a common set of
    specifications."""

    class DataModel(BaseProcedureDataset.DataModel):
        # Entry name -> torsiondrive entry definition.
        records: Dict[str, TDEntry] = {}
        # NOTE(review): appears to track which specifications have been
        # submitted/computed for this dataset — confirm.
        history: Set[str] = set()
        # Specification name -> specification definition.
        specs: Dict[str, TDEntrySpecification] = {}

        class Config(BaseProcedureDataset.DataModel.Config):
            pass
from ..interface.models import ObjectId, ProtoModel
from ..interface.models.rest_models import TaskQueuePOSTBody
from ..interface.models.task_models import PriorityEnum
from ..procedures import get_procedure_parser
class TaskManager(ProtoModel):
    # Handle to the server's storage layer; injected at runtime and excluded
    # from serialization (see Config.serialize_default_excludes).
    storage_socket: Optional[Any] = None
    # Logger instance; injected at runtime and excluded from serialization.
    logger: Optional[Any] = None
    # Task key -> procedure id; the values are queried against the storage
    # socket to check completion (see done()).
    required_tasks: Dict[str, str] = {}
    # Optional queue tag for routing submitted tasks.
    tag: Optional[str] = None
    # Priority assigned to submitted tasks; defaults to HIGH.
    priority: PriorityEnum = PriorityEnum.HIGH

    class Config(ProtoModel.Config):
        # Fields are mutated after construction (e.g. sockets attached later).
        allow_mutation = True
        # Runtime-only handles are never serialized.
        serialize_default_excludes = {"storage_socket", "logger"}
def done(self) -> bool:
"""
Check if requested tasks are complete.
"""
if len(self.required_tasks) == 0:
return True
task_query = self.storage_socket.get_procedures(
id=list(self.required_tasks.values()), include=["status", "error"]
)
status_values = set(x["status"] for x in task_query["data"])
import abc
import datetime
from typing import Any, Dict, List, Optional, Set, Tuple
from pydantic import validator
from qcelemental.models import ComputeError
from ..interface.models import ObjectId, ProtoModel
from ..interface.models.rest_models import TaskQueuePOSTBody
from ..interface.models.task_models import PriorityEnum
from ..procedures import get_procedure_parser
class TaskManager(ProtoModel):
    # Handle to the server's storage layer; injected at runtime and excluded
    # from serialization (see Config.serialize_default_excludes).
    storage_socket: Optional[Any] = None
    # Logger instance; injected at runtime and excluded from serialization.
    logger: Optional[Any] = None
    # Task key -> procedure id for the tasks this manager is waiting on.
    required_tasks: Dict[str, str] = {}
    # Optional queue tag for routing submitted tasks.
    tag: Optional[str] = None
    # Priority assigned to submitted tasks; defaults to HIGH.
    priority: PriorityEnum = PriorityEnum.HIGH

    class Config(ProtoModel.Config):
        # Fields are mutated after construction (e.g. sockets attached later).
        allow_mutation = True
        # Runtime-only handles are never serialized.
        serialize_default_excludes = {"storage_socket", "logger"}
def done(self) -> bool:
"""
Check if requested tasks are complete.
"""
if TYPE_CHECKING: # pragma: no cover
from ..models.gridoptimization import ScanDimension
from ..models import Molecule
class GOEntry(ProtoModel):
    """Data model for a single grid-optimization entry in a
    GridOptimizationDataset."""

    # Unique, human-readable identifier for this entry within the dataset.
    name: str
    # Server-side id of the starting molecule.
    initial_molecule: ObjectId
    # Grid-optimization-specific run options.
    go_keywords: GOKeywords
    attributes: Dict[str, Any]  # Might be overloaded key types
    # Maps a specification name to the id of the resulting procedure record.
    # NOTE(review): mapping semantics inferred from usage elsewhere — confirm.
    object_map: Dict[str, ObjectId] = {}
class GOEntrySpecification(ProtoModel):
    """A named pair of optimization and QC specifications used to run the
    entries of a grid-optimization dataset."""

    # Identifier for this specification within the dataset.
    name: str
    # Optional free-text description of the specification.
    description: Optional[str]
    # Settings for the constrained optimizations performed at each grid point.
    optimization_spec: OptimizationSpecification
    # Quantum chemistry settings (method/basis/program) for the underlying computations.
    qc_spec: QCSpecification
class GridOptimizationDataset(BaseProcedureDataset):
    """A collection of grid-optimization procedures sharing a common set of
    specifications."""

    class DataModel(BaseProcedureDataset.DataModel):
        # Entry name -> grid-optimization entry definition.
        records: Dict[str, GOEntry] = {}
        # NOTE(review): appears to track which specifications have been
        # submitted/computed for this dataset — confirm.
        history: Set[str] = set()
        # Specification name -> specification definition.
        specs: Dict[str, GOEntrySpecification] = {}

        class Config(BaseProcedureDataset.DataModel.Config):
            pass
"""
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Union
import pandas as pd
import qcelemental as qcel
from ..models import ObjectId, OptimizationSpecification, ProtoModel, QCSpecification
from .collection import BaseProcedureDataset
from .collection_utils import register_collection
if TYPE_CHECKING: # pragma: no cover
from ..models import Molecule # lgtm[py/unused-import] (https://github.com/Semmle/ql/issues/2014)
class OptEntry(ProtoModel):
    """Data model for the optimizations in a Dataset."""

    # Unique, human-readable identifier for this entry within the dataset.
    name: str
    # Server-side id of the starting molecule.
    initial_molecule: ObjectId
    # Extra keywords merged into the optimization for this entry.
    # NOTE(review): merge semantics inferred from the name — confirm.
    additional_keywords: Dict[str, Any] = {}
    # Free-form metadata attached to this entry.
    attributes: Dict[str, Any] = {}
    # Maps a specification name to the id of the resulting procedure record.
    # NOTE(review): mapping semantics inferred from usage elsewhere — confirm.
    object_map: Dict[str, ObjectId] = {}
class OptEntrySpecification(ProtoModel):
    """A named combination of optimization settings, QC settings, and result
    protocols used to run the entries of an optimization dataset."""

    # Identifier for this specification within the dataset.
    name: str
    # Optional free-text description of the specification.
    description: Optional[str]
    # Settings for the geometry optimization itself.
    optimization_spec: OptimizationSpecification
    # Quantum chemistry settings (method/basis/program) for the underlying computations.
    qc_spec: QCSpecification
    # QCElemental optimization protocols (which intermediate data to keep);
    # defaults to the library defaults.
    protocols: qcel.models.procedures.OptimizationProtocols = qcel.models.procedures.OptimizationProtocols()
from ..models import (OptimizationRecord, OptimizationSpecification, ProtoModel, QCSpecification, TorsionDriveInput,
TorsionDriveRecord)
class TorsionDriveStaticOptions(ProtoModel):
    """Frozen options describing how torsiondrives are run in the workflow."""

    # Torsiondrive-level keywords.
    keywords: Dict[str, Any]
    # Settings for the constrained optimizations performed at each grid point.
    optimization_spec: OptimizationSpecification
    # Quantum chemistry settings (method/basis/program) for the underlying computations.
    qc_spec: QCSpecification

    class Config:
        # Reject unknown fields and freeze instances after creation so the
        # options cannot drift once the workflow is defined.
        extra = "forbid"
        allow_mutation = False
class OptimizationStaticOptions(ProtoModel):
    """Frozen options describing how optimizations are run in the workflow."""

    # Optimization program to execute.
    program: str
    # Optimizer-level keywords; defaults to none.
    keywords: Dict[str, Any] = {}
    # Quantum chemistry settings (method/basis/program) for the underlying computations.
    qc_spec: QCSpecification

    class Config:
        # Reject unknown fields and freeze instances after creation so the
        # options cannot drift once the workflow is defined.
        extra = "forbid"
        allow_mutation = False
class OpenFFWorkflow(Collection):
"""
This is a QCA OpenFFWorkflow class.
Attributes
----------