Commit 281982ca authored by Alexandr Sokolov's avatar Alexandr Sokolov

Merge branch 'dev'

# Conflicts: # README.md
parents bfd8ffb4 55ad2207
# ignore custom config file
config_research.json
*.log
__pycache__
Библиотека графоориентированной разработки.
\ No newline at end of file
# pycomsdk
A set of tools simplifying computational research based on distributed computing.
# Getting started
## Installation
This package can be installed directly from gitlab via
```bash
$ pip install git+https://sa2systems.ru:88/com/pycomsdk.git
```
All the requirements will be installed automatically.
If you want to update the package to the latest version, first uninstall it and then install it again.
## Configuration
To start working with `comsdk`, create your own `config_research.json` based on [this example](/config_research.json.example) and put it into `~/.comsdk`:
```bash
$ cp config_research.json.example ~/.comsdk/config_research.json
```
import unittest
from copy import deepcopy
import subprocess
import os
import random
import sys
from test_funcs.simplest import *
import comsdk.parser as pars
from comsdk.graph import *
from comsdk.edge import Edge
# Driver script: parse an .adot graph file (argv[1]) and generate C++ code,
# either into the file given as argv[2] or into the parser's default location.
prsr = pars.Parser(tocpp=True)
data = {"a": 10, "b": 0}  # NOTE(review): unused here; presumably kept as a sample payload
gr = prsr.parse_file(sys.argv[1])
# BUG FIX: the original indexed sys.argv[2] unconditionally, raising
# IndexError when only the input file was supplied on the command line.
if len(sys.argv) > 2 and sys.argv[2] != "":
    prsr.generate_cpp(sys.argv[2])
else:
    prsr.generate_cpp()
This diff is collapsed.
from comsdk.misc import find_dir_by_named_regexp
from functools import partial
import os
class DistributedStorage:
    """
    Distributed storage is a set of sources containing the data. The sources must be accessible by the OS API.
    It is assumed that the data somewhat overlaps, namely, it should overlap in terms of the catalog hierarchy.
    However, this implementation does not guarantee the uniqueness of data: instead, it uses a priority to prefer
    one source over another while looking up. Even though duplicates are acceptable, the found ones will be printed
    out for the sake of user's attention.
    """

    def __init__(self, abs_storage_paths, prior_storage_index=0):
        # abs_storage_paths: absolute paths of the storage roots.
        # prior_storage_index: index of the storage preferred on duplicate hits
        # and used as the target for make_dir().
        self.storage_paths = abs_storage_paths
        self.prior_storage_index = prior_storage_index

    def get_dir_path(self, dir_):
        """
        Returns the full path to dir_ or None if dir_ is absent.
        """
        dir_path_tuple = self.lookup_through_dir(dir_, lambda dir_path: (dir_path, dir_path)
                                                 if os.path.exists(dir_path) else None)
        return dir_path_tuple[0] if dir_path_tuple is not None else None

    def make_dir(self, dir_):
        """
        Creates dir_ in prior storage. Returns the full path to it.
        """
        path_ = os.path.join(self.storage_paths[self.prior_storage_index], dir_)
        os.makedirs(path_)
        return path_

    def find_dir_by_named_regexp(self, parent_dir, regexp):
        """
        Finds a directory in parent_dir fulfilling regexp. Returns a tuple
        (full_path_to_found_dir, named_params_from_regexp).
        """
        return self.lookup_through_dir(parent_dir, partial(find_dir_by_named_regexp, regexp))

    def lookup_through_dir(self, dir_, lookup_func):
        """
        Looks up the data in dir_ by executing lookup_func on dir_. Returns a tuple
        (full_path_to_dir, some_data_regarding_dir) which must, in turn, be returned by
        lookup_func. lookup_func must take a single argument -- full path to the dir.
        """
        possible_paths = [os.path.join(source, dir_) if dir_ != '' else source
                          for source in self.storage_paths]
        found_data = None
        prior_found = False
        # Scan every storage; report duplicates, but keep the first hit unless
        # a hit from the prior storage has not been recorded yet.
        for path_i, path_ in enumerate(possible_paths):
            if os.path.exists(path_):
                tmp_found_data = lookup_func(path_)
                if tmp_found_data is not None:
                    # os.path.join leaves tmp_found_data[0] intact when it is already absolute
                    tmp_found_path = os.path.join(path_, tmp_found_data[0])
                    if found_data is not None:
                        print("Duplicate distributed dir is found: '{}' and '{}'".format(tmp_found_path, found_data[0]))
                    if not prior_found:
                        found_data = (tmp_found_path, tmp_found_data[1])
                    if path_i == self.prior_storage_index:
                        prior_found = True
        return found_data

    def listdir(self, dir_):
        """
        Lists the content of dir_. Returns a tuple (dirnames, filenames) which are obtained
        by simple union of the content of sources. Therefore, there might be copies whose
        detection must be performed elsewhere.
        """
        dirnames = []
        filenames = []
        for storage_path in self.storage_paths:
            full_path = os.path.join(storage_path, dir_)
            if os.path.exists(full_path):
                # next(os.walk(...)) yields only the top-level entries of full_path
                _, dirnames_, filenames_ = next(os.walk(full_path))
                dirnames += dirnames_
                filenames += filenames_
        return dirnames, filenames
This diff is collapsed.
import os
import subprocess
class BaseEnvironment(object):
    """Abstract execution environment.

    Subclasses provide the three stages of running a registered program:
    copying inputs in (preprocess), running it (execute) and copying
    outputs back (postprocess).
    """

    def __init__(self):
        # Maps a program name to the directory that contains its binary.
        self._programs = {}

    def preprocess(self, working_dir, input_copies_list):
        """Copy each entry of input_copies_list into working_dir."""
        raise NotImplementedError()

    def execute(self, working_dir, prog_name, command_line):
        """Run the registered program prog_name inside working_dir."""
        raise NotImplementedError()

    def postprocess(self, working_dir, output_copies_list):
        """Copy each entry of output_copies_list out of working_dir."""
        raise NotImplementedError()

    def add_program(self, prog_name, path_to_prog):
        """Register prog_name as living in the directory path_to_prog."""
        self._programs[prog_name] = path_to_prog
class LocalEnvironment(BaseEnvironment):
    """Execution environment that runs programs on the local machine.

    NOTE(review): `cp` and `rm` are expected to be module-level helpers
    (presumably imported from a comsdk utility module not visible in this
    chunk) -- confirm they are in scope.
    """

    def __init__(self):
        super().__init__()

    def preprocess(self, working_dir, input_copies_list):
        """Copy every input target into working_dir."""
        for copy_target in input_copies_list:
            # BUG FIX: original called `_copy(self, copy_target, working_dir)`,
            # looking up a non-existent module-level `_copy`; it must be the method.
            self._copy(copy_target, working_dir)

    def execute(self, working_dir, prog_name, args_str):
        """Run the registered program prog_name in working_dir with args_str."""
        prog_path = os.path.join(self._programs[prog_name], prog_name)
        command_line = 'cd {}; {} {}'.format(working_dir, prog_path, args_str)
        # use PIPEs to avoid breaking the child process when the parent process finishes
        # (works on Linux, solution for Windows is to add creationflags=0x00000010 instead of stdout, stderr, stdin)
        subprocess.call([command_line], shell=True)

    def postprocess(self, working_dir, output_copies_list):
        """Nothing to do locally: outputs are already on this machine."""
        pass

    def _copy(self, from_, to_, mode='from_local'):
        """Any mode is ignored since the copying shall be within a local machine anyway
        """
        cp(from_, to_)
        # BUG FIX: `self._print_copy_msg` is commented out in BaseEnvironment,
        # so the original call raised AttributeError; print the message inline.
        print('\tCopying %s to %s' % (from_, to_))

    def rm(self, target):
        """Remove target via the module-level rm helper (not this method)."""
        rm(target)
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
digraph CODEOBJECT_GENERATOR
{
// Definition of the handler functions (comment repaired: original Cyrillic text was mojibake-garbled)
FUNC_1 [module=case_gen_funcs, entry_func=function_1]
FUNC_2 [module=case_gen_funcs, entry_func=function_2]
FUNC_3 [module=case_gen_funcs, entry_func=function_3]
SAVE_TO_DB [module=case_gen_funcs, entry_func=save_to_db]
SAVE_TO_FILE [module=case_gen_funcs, entry_func=save_to_file]
REPEAT [module=case_gen_funcs, entry_func=repeat]
EXIT [module=case_gen_funcs, entry_func=exit]
CREATE_DUMP [module=case_gen_funcs, entry_func=create_dump]
// Definition of the predicate functions
PREDICATE_X [module=predicate_funcs, entry_func=predicate_x]
PREDICATE_Y [module=predicate_funcs, entry_func=predicate_y]
SELECTOR [module=predicate_funcs, entry_func=selector]
// Definition of the graph edges (morphisms)
EDGE_1 [predicate=PREDICATE_X, function=FUNC_1]
EDGE_2 [predicate=PREDICATE_Y, function=FUNC_2]
EDGE_3 [predicate=PREDICATE_X, function=FUNC_3]
EDGE_4 [predicate=PREDICATE_Y, function=SAVE_TO_DB]
EDGE_5 [predicate=PREDICATE_X, function=SAVE_TO_FILE]
EDGE_6 [predicate=PREDICATE_Y, function=REPEAT]
EDGE_7 [predicate=PREDICATE_X, function=EXIT]
EDGE_8 [function=EXIT]
EDGE_9 [predicate=CHECK_DUMP, function=EXIT]
EDGE_10 [function=CREATE_DUMP]
// Parallel execution (threading) is enabled for this node
CONTENT_SUBSTITUTED [parallelism=threading]
// Definition of the graph transitions
__BEGIN__ -> INPUT_READY
INPUT_READY -> TEMPLATE_COPIED [morphism=EDGE_1]
TEMPLATE_COPIED -> NAMES_SUBSTITUTED [morphism=EDGE_2]
NAMES_SUBSTITUTED -> CONTENT_SUBSTITUTED [morphism=EDGE_3]
CONTENT_SUBSTITUTED => DUMP_CREATED [morphism=EDGE_10]
CONTENT_SUBSTITUTED -> RESULT_SAVED [morphism=EDGE_4]
CONTENT_SUBSTITUTED -> RESULT_SAVED [morphism=EDGE_5]
// Depending on the result returned by SELECTOR, execution either repeats the cycle or leaves it
RESULT_SAVED -> INPUT_READY, __END__ [selector=SELECTOR, morphism=(EDGE_6, EDGE_7)]
RESULT_SAVED, DUMP_CREATED -> __END__ [morphism=(EDGE_8, EDGE_9)]
}
\ No newline at end of file
{
"LOCAL_HOST": {
"research_roots": "...",
"custom_programs": {
"@path_to_binaries@": ["@bin1@", "@bin2@", ...],
...
},
"custom_commands": {
"@command_name@": "@command itself@"
},
},
"REMOTE_HOSTS": {
"@remote_host_sid@": {
"ssh_host": "...",
"max_cores": ...,
"username": "...",
"password": "...",
"pkey": "...",
"research_path": "...",
"env_programs": ["@bin1@", "@bin1@", ...],
"custom_programs": {
"@path_to_binaries@": ["@bin1@", "@bin2@", ...],
...
},
"custom_commands": {
"@command_name@": "@command itself@"
},
"sge_template_name": "...",
"job_setter": "...",
"job_finished_checker": "..."
},
...
},
"RESEARCH": {
"@research_sid@": "@research_full_name@",
...
},
"RESEARCH_PROPS": {
...
},
"PERSONAL_TASK_SHIFT": 0,
"TEMPLATES_PATH": "...",
"MEETINGS_PATH": "..."
}
\ No newline at end of file
#include <iostream>
#include <list>

#include <anymap.h>
// Test functions exported with C linkage so they can be located by name
// via dlsym-style lookup from the generated graph code.
extern "C" {
    // Each entry function prints a tag and reports success (0).
    int PrintHello(com::Anymap) {
        std::cout<<"Hello!" << std::endl;
        return 0;
    }
    int PrintBye(com::Anymap) {
        std::cout<<"Bye!" << std::endl;
        return 0;
    }
    int PrintA(com::Anymap) {
        std::cout<<"A" << std::endl;
        return 0;
    }
    int PrintB(com::Anymap) {
        std::cout<<"B" << std::endl;
        return 0;
    }
    // Constant predicates for exercising true/false branches.
    bool ReturnTrue(){
        return true;
    }
    bool ReturnFalse(){
        return false;
    }
    // BUG FIX: the original body was `return false;`, which is not convertible
    // to std::list<bool> and fails to compile; per the function's name it
    // should yield three raised flags.
    std::list<bool> ThreeTrue(){
        return {true, true, true};
    }
}
\ No newline at end of file
# Build the shared test library: compile the anymap core and the test
# functions as position-independent objects.
g++ -c -fPIC ./dev/core/anymap.cpp -o anymap.o -I./dev;
g++ -c -fPIC tests.cpp -o tests.o -I./dev;
# g++ -c -fPIC ./dev/iniparser/iniparser.cpp -o iniparser.o -I./dev;
# Link the objects into libtest.so and remove the intermediates.
g++ tests.o anymap.o -shared -o libtest.so; rm tests.o anymap.o;
# Compile the generated graph source ($1) against the anymap core and run it;
# report the failure otherwise.
if g++ $1 -o graph.out -I./dev ./dev/core/anymap.cpp -ldl; then
./graph.out;
else
echo "Not Compiled!";
fi;
\ No newline at end of file
#include <libtools.h>
#include <anymap.h>
#include <iniparser.h>
// Aliases for the three callable kinds loaded from shared libraries:
// entry (processor) functions, predicates and selectors.
typedef std::function<int(com::Anymap*)> IntFunc;
typedef std::function<bool(com::Anymap*)> BoolFunc;
typedef std::function<bool*(com::Anymap*)> BoolArrFunc;
// Load the entry function `func` from the shared library `lib`.
IntFunc LoadEntry(std::string lib, std::string func) {
DllHandle handler;
return com::lib::loadFunction<int (com::Anymap*), DllHandle>(lib.c_str(), func.c_str(), handler);
}
// Load the predicate `func` from the shared library `lib`.
// NOTE(review): loads with signature `int (com::Anymap*)` although the result
// is stored into a BoolFunc; looks copy-pasted from LoadEntry -- confirm intent.
BoolFunc LoadPred(std::string lib, std::string func) {
DllHandle handler;
return com::lib::loadFunction<int (com::Anymap*), DllHandle>(lib.c_str(), func.c_str(), handler);
}
// Load the selector `func` (returns an array of branch flags) from `lib`.
BoolArrFunc LoadSelector(std::string lib, std::string func){
DllHandle handler;
return com::lib::loadFunction<bool* (com::Anymap*), DllHandle>(lib.c_str(), func.c_str(), handler);
}
// Abort the whole program with a diagnostic when a predicate evaluates to false.
void check_pred(bool predval, std::string predname) {
if (!predval) {
std::cout<<"Predicate "<<predname<<" returned FALSE!"<<std::endl;
exit(-1);
}
}
// Entry point of the generated program.
// NOTE: `% for` and `${...}` are Mako template directives expanded by the
// Python-side generator before this file is compiled.
int main(int argc, char const *argv[])
{
auto data = com::Anymap();
//Predicates
% for pred in preds:
auto ${pred} = LoadPred("${pred.module}", "${pred.name}");
% endfor
//Entry functions
% for morph in morphs:
auto ${str(morph)} = LoadEntry("${morph.module}", "${morph.name}");
% endfor
//Selectors
% for sel in sels:
auto ${str(sel)} = LoadSelector("${sel.module}", "${sel.name}");
% endfor
//Branch tokens
// Each state gets one flag per outgoing transfer; only the initial
// state starts with its flags raised.
bool* SEL_${states[0].name} = new bool[${len(states[0].transfers)}];
std::fill_n(SEL_${states[0].name}, ${len(states[0].transfers)}, true);
% for st in states[1:]:
bool* SEL_${st.name} = new bool[${len(st.transfers)}];
std::fill_n(SEL_${st.name}, ${len(st.transfers)}, false);
% endfor
// Graph traversal code emitted by the generator.
${body}
// Common label jumped to when the graph reaches its terminal state.
TERM:
std::cout<<"Termination!\n";
return 0;
}
\ No newline at end of file
[build-system]
# These are the assumed default build requirements from pip:
# https://pip.pypa.io/en/stable/reference/pip/#pep-517-and-518-support
requires = ["setuptools>=43.0.0", "wheel"]
build-backend = "setuptools.build_meta"
\ No newline at end of file
[metadata]
# This includes the license file(s) in the wheel.
# https://wheel.readthedocs.io/en/stable/user_guide.html#including-license-files-in-the-generated-wheel-file
license_files = LICENSE
\ No newline at end of file
"""Setuptools-based setup module for the comsdk package.

See:
https://packaging.python.org/guides/distributing-packages-using-setuptools/
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import pathlib

# Directory containing this setup.py; the long description is taken from
# the adjacent README.
here = pathlib.Path(__file__).parent.resolve()
long_description = (here / "README.md").read_text(encoding="utf-8")

setup(
    name="comsdk",
    version="0.1.0",
    description="Tools for computational research relying on distributed computing and member interaction",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://sa2systems.ru:88/com/pycomsdk",
    author="Anton Pershin",
    author_email="tony.pershin@gmail.com",
    # See https://pypi.org/classifiers/ for the full list of valid classifiers.
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Science/Research",
        "Topic :: Software Development",
        "License :: OSI Approved :: MIT License",
        # These classifiers are informational only; the enforced constraint
        # is python_requires below.
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3 :: Only",
    ],
    keywords="graph-based software engineering, distributed computing, SciOps",
    packages=find_packages(),
    python_requires=">=3.9.5, <4",
    # Runtime dependencies installed automatically by pip.
    install_requires=[
        "numpy",
        "jsons",
        "mako",
        "paramiko",
    ],
    project_urls={
        "Bug Reports": "https://sa2systems.ru:88/com/pycomsdk/issues",
        "Source": "https://sa2systems.ru:88/com/pycomsdk",
    },
)
\ No newline at end of file
def dummy_edge(data):
    """No-op edge: accepts the shared data dict and does nothing."""
    return None
def increment_a_edge(data):
    """Increase data['a'] by one in place."""
    data['a'] = data['a'] + 1
def increment_a_array_edge(data):
    """Increase every element of the list data['a'] by one, in place."""
    for idx, value in enumerate(data['a']):
        data['a'][idx] = value + 1
def increment_b_edge(data):
    """Increase data['b'] by one in place."""
    data['b'] = data['b'] + 1
def decrement_a_edge(data):
    """Decrease data['a'] by one in place."""
    data['a'] = data['a'] - 1
def nonzero_predicate(data):
    """Return True iff data['a'] differs from zero."""
    return not (data['a'] == 0)
def positiveness_predicate(data):
    """Return True iff data['a'] is strictly positive."""
    return 0 < data['a']
def nonpositiveness_predicate(data):
    """Return True iff data['a'] is zero or negative."""
    return not (data['a'] > 0)
def copy_to_c(data):
    """Mirror the current value of data['a'] into data['c']."""
    data.update(c=data['a'])
def selector_a_nonpositive(data):
    """Two-way selector: [True, False] when data['a'] <= 0, else [False, True]."""
    nonpositive = not (data['a'] > 0)
    return [nonpositive, not nonpositive]
def true_predicate(data):
    """Predicate that always succeeds regardless of the data."""
    return True
digraph ADD {
FUNC [module=test_funcs.simplest, entry_func=increment_b_edge]
PRED [module=test_funcs.simplest, entry_func=positiveness_predicate]
MORPH [predicate=PRED, function=FUNC, comment="ADD"]
__BEGIN__ -> ST [morphism = MORPH]
ST -> __END__
}
\ No newline at end of file
digraph SIMPLEST {
FUNCA [module=test_funcs.simplest, entry_func=increment_a_edge]
FUNCB [module=test_funcs.simplest, entry_func=increment_b_edge]
PRED [module=test_funcs.simplest, entry_func=true_predicate]
INCR_A [predicate=PRED, function=FUNCA]
INCR_B [predicate=PRED, function=FUNCB]
__BEGIN__ -> ROOT
ROOT -> BR1, BR2 [morphism=(INCR_A, INCR_B)]
BR1 -> BR1_ST [morphism=INCR_A]
BR2 -> BR2_ST [morphism=INCR_B]
BR1_ST, BR2_ST -> MERGE [morphism=(INCR_A, INCR_B)]
MERGE -> __END__
}
\ No newline at end of file
digraph SIMPLEST {
FUNCA [module=test_funcs.simplest, entry_func=increment_a_edge]
PRED [module=test_funcs.simplest, entry_func=true_predicate]
INCR_A [predicate=PRED, function=FUNCA]
ST1 [subgraph=tests/adot/trivial.adot]
ST2 [subgraph=tests/adot/cycled.adot]
ST3 [subgraph=tests/adot/branching.adot]
__BEGIN__ -> ST1
ST1 -> ST2 [morphism=INCR_A]
ST2 -> ST3 [morphism=INCR_A]
ST3 -> __END__
}
\ No newline at end of file
digraph SIMPLEST {
FUNCA [module=libtest, entry_func=IncA]
FUNCB [module=libtest, entry_func=IncB]
CHECKA [module=libtest, entry_func=CheckAEq4]
CHECKB [module=libtest, entry_func=CheckBEq4]
SETA [module=libtest, entry_func=SetAEq1]
SETB [module=libtest, entry_func=SetBEq1]
PASS [module=libtest, entry_func=PassFunc]
PRED [module=libtest, entry_func=PassPred]
INCR_A [predicate=PRED, function=FUNCA]
INCR_B [predicate=PRED, function=FUNCB]
CH_A [predicate=CHECKA, function = PASS]
SET_A [predicate=PRED, function=SETA]
SET_B [predicate=PRED, function=SETB]
CH_B [predicate=CHECKB, function = PASS]
__BEGIN__ -> ROT [morphism=SET_A]
ROT -> ROOT[morphism=SET_B]
ROOT -> BR1, BR2 [morphism=(INCR_A, INCR_B)]
BR1 -> BR1_ST [morphism=INCR_A]
BR2 -> BR2_ST [morphism=INCR_B]
BR1_ST, BR2_ST -> MERGE [morphism=(INCR_A, INCR_B)]
MERGE -> __END__, __END__ [morphism=(CH_A, CH_B)]
}
\ No newline at end of file
digraph SIMPLEST {
FUNCA [module=test_funcs.simplest, entry_func=increment_a_edge]
PRED [module=test_funcs.simplest, entry_func=true_predicate]
INCR_A [predicate=PRED, function=FUNCA]
ST1 [subgraph=tests/adot/cpptrivial.adot]
ST2 [subgraph=tests/adot/cppcycled.adot]
ST3 [subgraph=tests/adot/cppbranching.adot]
__BEGIN__ -> ST1
ST1 -> ST2
ST2 -> ST3
ST3 -> __END__
}
\ No newline at end of file
digraph CYCLED {
SETA [module=libtest, entry_func=SetAEq10]
FUNC [module=libtest, entry_func=DecA]
PRED [module=libtest, entry_func=PassPred]
SET [predicate=PRED, function=SETA]
MORPH [predicate=PRED, function=FUNC]
SEL [module = libtest, entry_func=SelectorA]
ST2 [selector = SEL]
__BEGIN__ -> ST1 [morphism=SET]
ST1 -> ST2 [morphism=MORPH]
ST2 -> ST1 [order=1]
ST2 -> __END__ [order = 2]
}
\ No newline at end of file
digraph SIMPLE {
FUNC [module=libtest, entry_func=IncA]
PRED [module=libtest, entry_func=PassPred]
MORPH [predicate=PRED, function=FUNC]
__BEGIN__ -> ST1 [morphism = MORPH]
ST1 -> ST2 [morphism = MORPH]
ST2 -> __END__ [morphism = MORPH]
}
\ No newline at end of file
digraph CYCLED {
FUNC [module=test_funcs.simplest, entry_func=decrement_a_edge]
PRED [module=test_funcs.simplest, entry_func=true_predicate]
MORPH [predicate=PRED, function=FUNC]
SEL [module = test_funcs.simplest, entry_func = selector_a_nonpositive]
ST2 [selector = SEL]
__BEGIN__ -> ST1
ST1 -> ST2 [morphism=MORPH]
ST2 -> ST1 [order=2]
ST2 -> __END__ [order = 1]
}
\ No newline at end of file
digraph gcdhom_inverted_model_pso
{
// Определение функций-обработчиков
PASS_PROCESSOR [module=libcomsdk, entry_func=pass_processor]
CHECK_PSO_AGENT_REINIT [module=libgcdfes, entry_func=check_pso_agent_reinit, comment="Проверка о необходимости реинициализации отдельной частицы (смещение частицы) в рое в рамках метода роя частиц."]
CHECK_PSO_SWARM_REINIT [module=libgcdfes, entry_func=check_pso_swarm_reinit, comment="Проверка о необходимости реинициализации всего роя частиц в рамках метода роя частиц."]
PSO_AGENT_REINIT [module=libgcdfes, entry_func=pso_agent_reinit, comment="Реинициализация отдельной частицы (смещение частицы) в рое в рамках метода роя частиц."]
PSO_SWARM_REINIT [module=libgcdfes, entry_func=pso_swarm_reinit, comment="Реинициализация всего роя частиц в рамках метода роя частиц."]
PSO_SWARM_ANALYSING [module=libgcdfes, entry_func=pso_swarm_analysing, comment="Анализ всего роя частиц в рамках метода роя частиц."]
PSO_HOM_AGENT_POSTPROC [module=libgcdfes, entry_func=pso_hom_agent_postproc, comment="Постпроцессинг после решения отдельной задачи методом асимптотического осреднения."]
PSO_TASK_DATA_REINIT [module=libgcdfes, entry_func=pso_task_data_reinit, comment="Реинициализация постановки задачи анализа эффективных характеристик КМ методом асимптотического осреднения."]
PSO_AGENT_INIT [module=libgcdfes, entry_func=pso_agent_init, comment="Инициализация отдельной частицы в рамках метода роя частиц."]
PSO_SWARM_INIT [module=libgcdfes, entry_func=pso_swarm_init, comment="Инициализация роя частиц."]
PSO_INIT [module=libgcdfes, entry_func=pso_swarm_init, comment="Инициализация метода роя частиц."]
// Определение функций-предикатов
PASS_PREDICATE [module=libcomsdk, entry_func=pass_predicate]
// Определение морфизмов
PASS_MORPHISM [predicate=PASS_PREDICATE, function=PASS_PROCESSOR, comment="ПАСС, морфизм."]
PSO_AGENT_REINIT_MORPHISM [predicate=PASS_PREDICATE, function=PSO_AGENT_REINIT]
PSO_SWARM_REINIT_MORPHISM [predicate=PASS_PREDICATE, function=PSO_SWARM_REINIT]
PSO_SWARM_ANALYSING_MORPHISM [predicate=PASS_PREDICATE, function=PSO_SWARM_ANALYSING]
PSO_HOM_AGENT_POSTPROC_MORPHISM [predicate=PASS_PREDICATE, function=PSO_HOM_AGENT_POSTPROC]
PSO_TASK_DATA_REINIT_MORPHISM [predicate=PASS_PREDICATE, function=PSO_TASK_DATA_REINIT]
PSO_AGENT_INIT_MORPHISM [predicate=PASS_PREDICATE, function=PSO_AGENT_INIT]
PSO_SWARM_INIT_MORPHISM [predicate=PASS_PREDICATE, function=PSO_SWARM_INIT]
PSO_INIT_MORPHISM [predicate=PASS_PREDICATE, function=PSO_INIT]
// Определение атрибутов узлов
S_1 [subgraph=gcdhom_preprocessor.adot]
S_5 [subgraph=gcdhom_processor.adot]
S_6 [selector=CHECK_PSO_AGENT_REINIT]
S_7 [selector=CHECK_PSO_SWARM_REINIT]
// Определение топологии графовой модели метода конечных элементов
__BEGIN__ -> S_1
S_1 -> S_2 [morphism=PSO_INIT_MORPHISM]
S_2 -> S_3 [morphism=PSO_SWARM_INIT_MORPHISM]
S_3 -> S_4 [morphism=PSO_AGENT_INIT_MORPHISM]
S_4 -> S_5 [morphism=PSO_TASK_DATA_REINIT_MORPHISM]
S_5 -> S_6 [morphism=PSO_HOM_AGENT_POSTPROC_MORPHISM]
S_6 -> S_4, S_7 [morphism=(PSO_AGENT_REINIT_MORPHISM, PSO_SWARM_ANALYSING_MORPHISM), order=(10,20)]
S_7 -> S_4, S_8 [morphism=(PSO_SWARM_REINIT_MORPHISM, PASS_MORPHISM), order=(30,40)]
S_8 -> __END__ [comment = "Расчет завершён."]
}
digraph SIMPLEST {
FUNCA [module=test_funcs.simplest, entry_func=increment_a_edge]
FUNCB [module=test_funcs.simplest, entry_func=increment_b_edge]
PRED [module=test_funcs.simplest, entry_func=positiveness_predicate]
INCR_A [predicate=PRED, function=FUNCA]
INCR_B [predicate=PRED, function=FUNCB]
__BEGIN__ -> ROOT
ROOT -> BR1, BR2, BR3 [morphism=(INCR_A, INCR_A, INCR_A)]
//BR3 -> SIBL3_BR1, SIBL3_BR2 [morphism=(INCR_A, INCR_A)]
//BR2 -> SIBL2_BR1, SIBL2_BR2 [morphism=(INCR_A, INCR_A)]
//SIBL3_BR1 -> SIBL3_BR1_1, SIBL3_BR1_2 [morphism=(INCR_A, INCR_A)]
//SIBL3_BR1_1, SIBL3_BR1_2 -> TERM [morphism=(INCR_A, INCR_A)]
//BR1, SIBL2_BR1, SIBL2_BR2, TERM, SIBL3_BR2 -> __END__ [morphism=(INCR_A, INCR_A, INCR_A, INCR_A, INCR_A)]
BR1, BR2, BR3 -> __END__ [morphism=(INCR_A, INCR_A, INCR_A)]
}
\ No newline at end of file
digraph TEST_SUB {
FUNC [module=test_funcs.simplest, entry_func=increment_a_edge]
PRED [module=test_funcs.simplest, entry_func=positiveness_predicate]
MORPH [predicate=PRED, function=FUNC]
SEL [module=test_funcs.simplest, entry_func=selector_a_nonpositive]
ST2 [subgraph = tests/adot/file.adot]
ST3 [selector = SEL]
__BEGIN__ -> ST1 [morphism = MORPH]
ST1 -> ST2
ST2 -> ST3
ST3 -> __END__
}
\ No newline at end of file
digraph TRIVIAL {
FUNC [module=test_funcs.simplest, entry_func=increment_a_edge]
PRED [module=test_funcs.simplest, entry_func=true_predicate]
MORPH [predicate=PRED, function=FUNC, comment="ADD"]
__BEGIN__ -> ST1 [morphism = MORPH]
ST1 -> ST2 [morphism = MORPH]
ST2 -> __END__ [morphism = MORPH]
}
\ No newline at end of file
#include <fstream>
#include <string>

using namespace std;

// Reads a single integer from the file named by argv[1] and writes its
// square to "b.dat" in the current working directory.
int main(int argc, char* argv[])
{
    // BUG FIX: the original dereferenced argv[1] unconditionally and
    // crashed when no input file was supplied.
    if (argc < 2)
        return 1;
    string input_file_path(argv[1]);
    string output_file_path("b.dat");
    ifstream f_in(input_file_path);
    int x;
    f_in >> x;
    ofstream f_out(output_file_path);
    f_out << x*x;
    return 0;
}
#$ -cwd -V
#$ -l h_rt=12:00:00
#$ -pe smp 12
# SGE job: run the compiled `square` test binary on the prepared input file.
/home/home01/mmap/tests/square/square /home/home01/mmap/tests/square_test_dir/a.dat
#$ -cwd -V
#$ -l h_rt=12:00:00
#$ -pe smp 12
# SGE job: continuation run of findsoln, then submit the follow-up job.
./findsoln -symms reflect_symmetry.asc -R 170.320 -o find-170.320 -es 1e-15 -eqb find-170.330/ubest.h5
qsub fe_170.315.sh
This diff is collapsed.
import unittest
import subprocess
from comsdk.graph import *
from comsdk.parser import Parser
# Local checkout locations used by the C++ generation tests below.
# NOTE(review): developer-specific hard-coded paths -- adjust for your machine.
path_to_comsdk = "/home/lbstr/bmstu/comsdk"
path_to_pycomsdk = "/home/lbstr/bmstu/pycomsdk"
class ParserGoodCheck(unittest.TestCase):
    """End-to-end checks of the .adot parser: Python graph execution and
    C++ code generation followed by out-of-process compilation."""

    def _run_graph(self, adot_path, data):
        """Parse adot_path and run the resulting graph on data in place."""
        parser = Parser()
        graph = parser.parse_file(adot_path)
        graph.run(data)

    def _generate_and_compile(self, adot_path):
        """Parse adot_path with the C++ backend, emit the source and build/run it."""
        parser = Parser(tocpp=True)
        parser.parse_file(adot_path)
        parser.generate_cpp(path_to_comsdk+"res.cpp")
        command = "cd "+path_to_comsdk+"; "+path_to_pycomsdk+"/cpp/run.sh "+path_to_comsdk+"res.cpp"
        subprocess.check_output(["bash", "-c", command])

    def test_trivial_graph(self):
        data = {"a": 1}
        self._run_graph("./tests/adot/trivial.adot", data)
        self.assertEqual(data["a"], 4)

    def test_branching_graph(self):
        data = {"a": 1, "b": 1}
        self._run_graph("./tests/adot/branching.adot", data)
        self.assertEqual(data["a"], 4)
        self.assertEqual(data["b"], 4)

    def test_cycled_graph(self):
        data = {"a": 10}
        self._run_graph("./tests/adot/cycled.adot", data)
        self.assertEqual(data["a"], 0)

    def test_complex_graph(self):
        data = {"a": 1, "b": 1}
        self._run_graph("./tests/adot/complex.adot", data)
        self.assertEqual(data["a"], 4)
        self.assertEqual(data["b"], 4)

    def test_cpp_trivial_graph(self):
        self._generate_and_compile("./tests/adot/cpptrivial.adot")

    def test_cpp_branching_graph(self):
        self._generate_and_compile("./tests/adot/cppbranching.adot")

    def test_cpp_cycled_graph(self):
        self._generate_and_compile("./tests/adot/cppcycled.adot")

    def test_cpp_complex_graph(self):
        self._generate_and_compile("./tests/adot/cppcomplex.adot")


if __name__ == '__main__':
    unittest.main()
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment