From 42eea5a9219d6390506e1cd1d2e10ac5b6de68c7 Mon Sep 17 00:00:00 2001 From: gaoyunshan Date: Fri, 25 Oct 2024 15:51:45 +0800 Subject: [PATCH 1/5] init workflow --- _cmd.py | 2 +- _deploy.py | 21 +++ _plugin.py | 4 +- _workflow.py | 218 ++++++++++++++++++++++++++++ const.py | 14 +- core.py | 115 +++++++++++---- init.sh | 9 +- plugins/general/0.1/status_check.py | 40 +++++ workflows/general/0.1/display.py | 30 ++++ 9 files changed, 416 insertions(+), 37 deletions(-) create mode 100644 _workflow.py create mode 100644 plugins/general/0.1/status_check.py create mode 100644 workflows/general/0.1/display.py diff --git a/_cmd.py b/_cmd.py index 7396a9bb..7af42e11 100644 --- a/_cmd.py +++ b/_cmd.py @@ -197,7 +197,7 @@ def init_home(self): if not COMMAND_ENV.get(ENV.ENV_OBD_ID): COMMAND_ENV.set(ENV.ENV_OBD_ID, uuid()) if VERSION != version: - for part in ['plugins', 'config_parser', 'optimize', 'mirror/remote']: + for part in ['workflows', 'config_parser', 'optimize', 'mirror/remote']: obd_part_dir = os.path.join(self.OBD_PATH, part) if DirectoryUtil.mkdir(self.OBD_PATH): root_part_path = os.path.join(self.OBD_INSTALL_PATH, part) diff --git a/_deploy.py b/_deploy.py index 598b7f9a..d0cb0b10 100644 --- a/_deploy.py +++ b/_deploy.py @@ -1096,6 +1096,27 @@ def enable_mem_mode(self): def disable_mem_mode(self): self._mem_mode = False + @property + def sorted_components(self): + available_depends = list(self.components.keys()) + unsort_components = available_depends + sorted_components = [] + while unsort_components: + components = unsort_components + unsort_components = [] + for component in components: + cluster_config = self.components[component] + for component_name in cluster_config.depends: + if component_name not in available_depends: + continue + if component_name not in sorted_components: + unsort_components.append(component) + break + else: + sorted_components.append(component) + return sorted_components + + @property def user(self): return self._user diff --git a/_plugin.py b/_plugin.py index c5ca633a..0a0814de 100644 --- a/_plugin.py +++ b/_plugin.py @@ -385,6 +385,7 @@ class Null(object): def __init__(self): pass + class ParamPlugin(Plugin): @@ -807,13 +808,14 @@ def requirement_list(self, package_info): class ComponentPluginLoader(object): PLUGIN_TYPE = None + MODULE_NAME = __name__ def __init__(self, home_path, plugin_type=PLUGIN_TYPE, dev_mode=False, stdio=None): if plugin_type: self.PLUGIN_TYPE = plugin_type if not self.PLUGIN_TYPE: raise NotImplementedError - self.plguin_cls = getattr(sys.modules[__name__], self.PLUGIN_TYPE.value, False) + self.plguin_cls = getattr(sys.modules[self.MODULE_NAME], self.PLUGIN_TYPE.value, False) if not self.plguin_cls: raise ImportError(self.PLUGIN_TYPE.value) self.dev_mode = dev_mode diff --git a/_workflow.py b/_workflow.py new file mode 100644 index 00000000..ec1b91ac --- /dev/null +++ b/_workflow.py @@ -0,0 +1,218 @@ +# coding: utf-8 +# OceanBase Deploy. +# Copyright (C) 2021 OceanBase +# +# This file is part of OceanBase Deploy. +# +# OceanBase Deploy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# OceanBase Deploy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with OceanBase Deploy. If not, see . + + +from __future__ import absolute_import, division, print_function + +import os +import sys + +from _manager import Manager +from _plugin import ComponentPluginLoader, pyScriptPluginExec, PyScriptPluginLoader, PyScriptPlugin +from tool import OrderedDict + + +class WorkflowsIter: + + def __init__(self, workflows): + self.workflows = workflows + self.stages = [] + for workflow in workflows: + self.stages += workflow.stages + self.stages = sorted(set(self.stages)) + self.index = 0 + self.lentgh = len(self.stages) + + def __iter__(self): + return self + + def __next__(self): + if self.index < self.lentgh: + stage = self.stages[self.index] + self.index += 1 + stages = OrderedDict() + for workflow in self.workflows: + stages[workflow.component_name] = workflow[stage] + return stages + else: + raise StopIteration + + +class Workflows(object): + + def __init__(self, name): + self.name = name + self.workflows = {} + + def __getitem__(self, component_name): + if component_name not in self.workflows: + self.workflows[component_name] = ComponentWorkflow(self.name, component_name) + return self.workflows[component_name] + + def __setitem__(self, component_name, component_workflow): + if not isinstance(component_workflow, ComponentWorkflow): + raise TypeError("%s must be a instance of ComponentWorkflow" % component_workflow.__class__.__name__) + if component_workflow.name != self.name: + raise ValueError("%s is not a %s workflow" % (component_workflow, self.name)) + self.workflows[component_name] = component_workflow + + def __call__(self, dpeloy_config): + workflows = [ + self[component] for component in dpeloy_config.sorted_components + ] + return WorkflowsIter(workflows) + + +class PluginTemplate(object): + + def __init__(self, name, component_name, version=None, kwargs=None): + self.name = name + self.component_name = component_name + self.version = version + self.kwargs = kwargs or {} + + +class ComponentWorkflow(object): + + def __init__(self, name, component_name): + self.name = name + self.component_name = component_name + self.stage = {} + + def add(self, stage, *plugins): + return self.add_with_kwargs(stage, None, *plugins) + + def add_with_component(self, stage, component_name, *plugins): + return self.add_with_component_version(stage, component_name, None, *plugins) + + def add_with_component_version(self, stage, component_name, version, *plugins): + return self.add_with_component_version_kwargs(stage, component_name, version, None, *plugins) + + def add_with_kwargs(self, stage, kwargs, *plugins): + return self.add_with_component_version_kwargs(stage, self.component_name, None, kwargs, *plugins) + + def add_with_component_version_kwargs(self, stage, component_name, version, kwargs, *plugins): + stage = int(stage) + plugins = [PluginTemplate(plugin, component_name, version, kwargs) for plugin in plugins] + if stage not in self.stage: + self.stage[stage] = plugins + else: + self.stage[stage] += plugins + + @property + def stages(self): + return sorted(self.stage.keys()) + + def __getitem__(self, stage): + return self.stage.get(stage, []) + + +class ComponentWorkflowLoader(ComponentPluginLoader): + MODULE_NAME = __name__ + + +def workflowTemplateExec(func): + def _new_func( + self, namespace, namespaces, deploy_name, deploy_status, + repositories, components, clients, cluster_config, cmd, + options, stdio, *arg, **kwargs + ): + workflow = ComponentWorkflow(self.name, 
self.component_name) + ret = pyScriptPluginExec(func)(self, namespace, namespaces, deploy_name, deploy_status, + repositories, components, [], cluster_config, cmd, + options, stdio, workflow, *arg, **kwargs) + return workflow if ret else None + return _new_func + + +class WorkflowLoader(ComponentWorkflowLoader): + + def __init__(self, home_path, workflow_name=None, dev_mode=False, stdio=None): + if not workflow_name: + raise NotImplementedError + type_name = 'PY_SCRIPT_WORKFLOW_%s' % workflow_name.upper() + type_value = 'PyScriptWorkflow%sPlugin' % ''.join([word.capitalize() for word in workflow_name.split('_')]) + self.PLUGIN_TYPE = PyScriptPluginLoader.PyScriptPluginType(type_name, type_value) + if not getattr(sys.modules[__name__], type_value, False): + self._create_(workflow_name) + super(WorkflowLoader, self).__init__(home_path, dev_mode=dev_mode, stdio=stdio) + self.workflow_name = workflow_name + + def _create_(self, workflow_name): + exec(''' +class %s(PyScriptPlugin): + + FLAG_FILE = '%s.py' + PLUGIN_NAME = '%s' + + def __init__(self, component_name, plugin_path, version, dev_mode): + super(%s, self).__init__(component_name, plugin_path, version, dev_mode) + + @staticmethod + def set_plugin_type(plugin_type): + %s.PLUGIN_TYPE = plugin_type + + @workflowTemplateExec + def %s( + self, namespace, namespaces, deploy_name, deploy_status, + repositories, components, clients, cluster_config, cmd, + options, stdio, *arg, **kwargs): + pass + ''' % (self.PLUGIN_TYPE.value, workflow_name, workflow_name, self.PLUGIN_TYPE.value, self.PLUGIN_TYPE.value, workflow_name)) + clz = locals()[self.PLUGIN_TYPE.value] + setattr(sys.modules[__name__], self.PLUGIN_TYPE.value, clz) + clz.set_plugin_type(self.PLUGIN_TYPE) + return clz + + +class ComponentWorkflowLoader(WorkflowLoader): + + def __init__(self, home_path, component_name, workflow_name=None, dev_mode=False, stdio=None): + super(ComponentWorkflowLoader, self).__init__(os.path.join(home_path, component_name), workflow_name, dev_mode=dev_mode, stdio=stdio) + self._general_loader = WorkflowLoader(os.path.join(home_path, "general"), workflow_name, dev_mode=dev_mode, stdio=stdio) + self._general_loader.component_name = component_name + + + def get_workflow_template(self, version): + template = self.get_best_plugin(version) + if not template: + template = self._general_loader.get_best_plugin(version) + return template + + +class WorkflowManager(Manager): + + RELATIVE_PATH = 'workflows' + # The directory structure for plugin is ./workflows/{component_name}/{version} + + def __init__(self, home_path, dev_mode=False, stdio=None): + super(WorkflowManager, self).__init__(home_path, stdio=stdio) + self.workflow_loaders = {} + self.dev_mode = dev_mode + + def get_loader(self, workflow_name, component_name): + if component_name not in self.workflow_loaders: + self.workflow_loaders[component_name] = {} + if workflow_name not in self.workflow_loaders[component_name]: + self.workflow_loaders[component_name][workflow_name] = ComponentWorkflowLoader(self.path, component_name, workflow_name, self.dev_mode, stdio=self.stdio) + return self.workflow_loaders[component_name][workflow_name] + + def get_workflow_template(self, workflow_name, component_name, version): + loader = self.get_loader(workflow_name, component_name) + return loader.get_workflow_template(version) diff --git a/const.py b/const.py index 70d5dadf..d93599df 100644 --- a/const.py +++ b/const.py @@ -78,4 +78,16 @@ TOOL_TPCC: 'tpcc', TOOL_SYSBENCH: 'sysbench', } -TOOL_TPCC_BENCHMARKSQL = 
'OB-BenchmarkSQL-5.0.jar' \ No newline at end of file +TOOL_TPCC_BENCHMARKSQL = 'OB-BenchmarkSQL-5.0.jar' + +#workflow stages +STAGE_FIRST = 10 +STAGE_SECOND = 20 +STAGE_THIRD = 30 +STAGE_FOURTH = 40 +STAGE_FIFTH = 50 +STAGE_SIXTH = 60 +STAGE_SEVENTH = 70 +STAGE_EIGHTH = 80 +STAGE_NINTH = 90 +STAGE_TENTH = 100 \ No newline at end of file diff --git a/core.py b/core.py index e22e2885..680019e8 100644 --- a/core.py +++ b/core.py @@ -38,6 +38,7 @@ from _mirror import MirrorRepositoryManager, PackageInfo, RemotePackageInfo from _plugin import PluginManager, PluginType, InstallPlugin, PluginContextNamespace from _deploy import DeployManager, DeployStatus, DeployConfig, DeployConfigStatus, Deploy, ClusterStatus +from _workflow import WorkflowManager, Workflows from _tool import Tool, ToolManager from _repository import RepositoryManager, LocalPackage, Repository, RepositoryVO import _errno as err @@ -62,6 +63,7 @@ def __init__(self, home_path, dev_mode=False, lock_mode=None, stdio=None): self._repository_manager = None self._deploy_manager = None self._plugin_manager = None + self._workflow_manager = None self._lock_manager = None self._optimize_manager = None self._tool_manager = None @@ -97,6 +99,12 @@ def plugin_manager(self): self._plugin_manager = PluginManager(self.home_path, self.dev_mode, self.stdio) return self._plugin_manager + @property + def workflow_manager(self): + if not self._workflow_manager: + self._workflow_manager = WorkflowManager(self.home_path, self.dev_mode, self.stdio) + return self._workflow_manager + @property def deploy_manager(self): if not self._deploy_manager: @@ -170,7 +178,61 @@ def get_namespace(self, spacename): self.namespaces[spacename] = namespace return namespace - def call_plugin(self, plugin, repository, spacename=None, target_servers=None, **kwargs): + def get_workflows(self, workflow_name, repositories=None, no_found_act='exit'): + if not repositories: + repositories = self.repositories + workflows = Workflows(workflow_name) + for repository in repositories: + workflows[repository.name] = self.get_workflow(repository, workflow_name, repository.name, repository.version, no_found_act=no_found_act) + return workflows + + def get_workflow(self, repository, workflow_name, component_name, version=0.1, no_found_act='exit'): + if no_found_act == 'exit': + no_found_exit = True + else: + no_found_exit = False + msg_lv = 'warn' if no_found_act == 'warn' else 'verbose' + self._call_stdio('verbose', 'Searching %s template for components ...', workflow_name) + template = self.workflow_manager.get_workflow_template(workflow_name, component_name, version) + if template: + ret = self.call_workflow_template(template, repository) + if ret: + self._call_stdio('verbose', 'Found for %s for %s-%s' % (template, template.component_name, template.version)) + return ret + if no_found_exit: + self._call_stdio('critical', 'No such %s template for %s-%s' % (template, template.component_name, template.version)) + exit(1) + else: + self._call_stdio(msg_lv, 'No such %s template for %s-%s' % (template, template.component_name, template.version)) + + def run_workflow(self, workflows, deploy_config=None, repositories=None, no_found_act='exit'): + if not deploy_config: + deploy_config = self.deploy.deploy_config + if not repositories: + repositories = self.repositories + repositories = {repository.name: repository for repository in repositories} + for stages in workflows(deploy_config): + for component_name in stages: + for plugin_template in stages[component_name]: + if 'repository' in 
plugin_template.kwargs: + repository = plugin_template.kwargs['repository'] + del plugin_template.kwargs['repository'] + else: + if plugin_template.component_name in repositories: + repository = repositories[plugin_template.component_name] + else: + repository = repositories[component_name] + if not plugin_template.version: + if plugin_template.component_name in repositories: + plugin_template.version = repositories[component_name].version + else: + plugin_template.version = repository.version + plugin = self.search_py_script_plugin_by_template(plugin_template, no_found_act=no_found_act) + if plugin and not self.call_plugin(plugin, repository, **plugin_template.kwargs): + return False + return True + + def _init_call_args(self, repository, spacename=None, target_servers=None, **kwargs): args = { 'namespace': self.get_namespace(repository.name if spacename == None else spacename), 'namespaces': self.namespaces, @@ -192,9 +254,18 @@ def call_plugin(self, plugin, repository, spacename=None, target_servers=None, * args['cluster_config'] = self.deploy.deploy_config.components[repository.name] if "clients" not in kwargs: args['clients'] = self.get_clients(self.deploy.deploy_config, self.repositories) + args['clients'] = args.get('clients', {}) args.update(kwargs) - - self._call_stdio('verbose', 'Call %s for %s' % (plugin, repository)) + return args + + def call_workflow_template(self, workflow_template, repository): + self._call_stdio('verbose', 'Call workflow %s for %s' % (workflow_template, repository)) + args = self._init_call_args(repository, None, None, clients=None) + return workflow_template(**args) + + def call_plugin(self, plugin, repository, spacename=None, target_servers=None, **kwargs): + self._call_stdio('verbose', 'Call plugin %s for %s' % (plugin, repository)) + args = self._init_call_args(repository, spacename, target_servers, **kwargs) return plugin(**args) def _call_stdio(self, func, msg, *arg, **kwarg): @@ -348,6 +419,11 @@ def search_plugins(self, repositories, plugin_type, no_found_exit=True): return None return plugins + def search_py_script_plugin_by_template(self, template, no_found_act='exit'): + repository = self.repository_manager.get_repository_allow_shadow(template.component_name, template.version) + plugins = self.search_py_script_plugin([repository], template.name, no_found_act=no_found_act) + return plugins.get(repository) + def search_py_script_plugin(self, repositories, script_name, no_found_act='exit'): if no_found_act == 'exit': no_found_exit = True @@ -2674,8 +2750,8 @@ def _reload_cluster(self, deploy, repositories): cluster_config = deploy_config.components[repository.name] new_cluster_config = new_deploy_config.components[repository.name] - if not self.call_plugin(connect_plugins[repository], repository): - if not self.call_plugin(connect_plugins[repository], repository, components=new_deploy_config.components.keys(), cluster_config=new_cluster_config): + if not self.call_plugin(connect_plugins[repository], repository, retry_times=30): + if not self.call_plugin(connect_plugins[repository], repository, components=new_deploy_config.components.keys(), cluster_config=new_cluster_config, retry_times=30): continue if not self.call_plugin(reload_plugins[repository], repository, new_cluster_config=new_cluster_config): @@ -2715,34 +2791,13 @@ def display_cluster(self, name): # Check whether the components have the parameter plugins and apply the plugins self.search_param_plugin_and_apply(repositories, deploy_config) - connect_plugins = 
self.search_py_script_plugin(repositories, 'connect') - display_plugins = self.search_py_script_plugin(repositories, 'display') self._call_stdio('stop_loading', 'succeed') # Get the client - ssh_clients = self.get_clients(deploy_config, repositories) - - # Check the status for the deployed cluster - component_status = {} - self.cluster_status_check(repositories, component_status) - - for repository in repositories: - cluster_status = component_status[repository] - servers = [] - for server in cluster_status: - if cluster_status[server] == 0: - self._call_stdio('warn', '%s %s is stopped' % (server, repository.name)) - else: - servers.append(server) - if not servers: - continue - - if not self.call_plugin(connect_plugins[repository], repository): - continue - self.call_plugin(display_plugins[repository], repository) - - return True - + self.get_clients(deploy_config, repositories) + workflows = self.get_workflows('display') + return self.run_workflow(workflows) + def stop_cluster(self, name): self._call_stdio('verbose', 'Get Deploy by name') deploy = self.deploy_manager.get_deploy_config(name) diff --git a/init.sh b/init.sh index 9b973c96..2efe7137 100644 --- a/init.sh +++ b/init.sh @@ -19,6 +19,7 @@ if [ ${FORCE_DEPLOY} == "1" ]; then rm -rf ${OBD_HOME}/config_parser rm -rf ${OBD_HOME}/optimize rm -rf ${OBD_HOME}/plugins + rm -rf ${OBD_HOME}/workflows sudo rm -rf /etc/profile.d/obd.sh fi @@ -33,7 +34,7 @@ if [ ! -e "OceanBase.repo" ]; then wget -q https://mirrors.aliyun.com/oceanbase/OceanBase.repo fi -mkdir -p ${OBD_HOME}/{plugins,optimize,config_parser} +mkdir -p ${OBD_HOME}/{workflows,plugins,optimize,config_parser} mkdir -p ${OBD_HOME}/plugins/obproxy-ce/3.1.0 @@ -46,7 +47,7 @@ for DIR in ${WORK_DIR}/plugins/obproxy-ce/3.1.0/ ${WORK_DIR}/plugins/obproxy/3.1 done done -for DIR in plugins optimize config_parser; do +for DIR in workflows plugins optimize config_parser; do FILE_LIST=$(ls ${WORK_DIR}/${DIR}) for FILE in $FILE_LIST; do if [ ! -e "${OBD_HOME}/${DIR}/${FILE}" ]; then @@ -59,7 +60,7 @@ if [ ! -e ${OBD_HOME}/optimize/obproxy-ce ]; then ln -s ${OBD_HOME}/optimize/obproxy ${OBD_HOME}/optimize/obproxy-ce fi -for DIR in plugins config_parser; do +for DIR in workflows plugins config_parser; do if [ ! -e ${OBD_HOME}/${DIR}/oceanbase-ce ]; then ln -s ${OBD_HOME}/${DIR}/oceanbase ${OBD_HOME}/${DIR}/oceanbase-ce fi @@ -68,7 +69,7 @@ for DIR in plugins config_parser; do fi done -echo '' >> ${OBD_HOME}/version +echo -n '' > ${OBD_HOME}/version echo "============update .bashrc============" diff --git a/plugins/general/0.1/status_check.py b/plugins/general/0.1/status_check.py new file mode 100644 index 00000000..8f1728e8 --- /dev/null +++ b/plugins/general/0.1/status_check.py @@ -0,0 +1,40 @@ +# coding: utf-8 +# OceanBase Deploy. +# Copyright (C) 2021 OceanBase +# +# This file is part of OceanBase Deploy. +# +# OceanBase Deploy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# OceanBase Deploy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with OceanBase Deploy. If not, see . 
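+#
+# status_check is a component-agnostic plugin: it reads the per-server results
+# collected by a preceding 'status' plugin from the plugin context, compares each
+# server against the expected ClusterStatus (running by default), reports any
+# mismatch via stdio.warn or stdio.error, and returns true/false so the enclosing
+# workflow stage can decide whether to continue.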
+ + +from __future__ import absolute_import, division, print_function + +from _deploy import ClusterStatus + + +def status_check(plugin_context, target_status=ClusterStatus.STATUS_RUNNING, is_error=False, *args, **kwargs): + stdio = plugin_context.stdio + cluster_config = plugin_context.cluster_config + cluster_status = plugin_context.get_return('status').get_return('cluster_status') + print_msg = stdio.error if is_error else stdio.warn + status_msg = 'running' if target_status == ClusterStatus.STATUS_RUNNING else 'stopped' + status_check_pass = True + for server in cluster_status: + if cluster_status[server] != target_status.value: + status_check_pass = False + print_msg("%s %s is not %s" % (server, cluster_config.name, status_msg)) + + if status_check_pass: + return plugin_context.return_true(status_check_pass=status_check_pass) + return plugin_context.return_false(status_check_pass=status_check_pass) diff --git a/workflows/general/0.1/display.py b/workflows/general/0.1/display.py new file mode 100644 index 00000000..166abea3 --- /dev/null +++ b/workflows/general/0.1/display.py @@ -0,0 +1,30 @@ +# coding: utf-8 +# OceanBase Deploy. +# Copyright (C) 2021 OceanBase +# +# This file is part of OceanBase Deploy. +# +# OceanBase Deploy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# OceanBase Deploy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with OceanBase Deploy. If not, see . 
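+#
+# Workflow template for the generic 'display' flow: rather than executing anything
+# itself, it registers plugins into ordered stages. STAGE_FIRST runs the component's
+# own 'status' plugin followed by the shared 'general' status_check plugin, and
+# STAGE_SECOND runs 'connect' and then 'display'.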
+ + +from __future__ import absolute_import, division, print_function + +import const + + +def display(plugin_context, workflow, *args, **kwargs): + workflow.add(const.STAGE_FIRST, 'status') + workflow.add_with_component(const.STAGE_FIRST, 'general', 'status_check') + workflow.add(const.STAGE_SECOND, 'connect', 'display') + plugin_context.return_true() From 8d5c2cdb170c39363aba97b7dca658d53119bbfc Mon Sep 17 00:00:00 2001 From: gaoyunshan Date: Fri, 25 Oct 2024 16:14:19 +0800 Subject: [PATCH 2/5] fix init home --- _cmd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_cmd.py b/_cmd.py index 7af42e11..041c31a4 100644 --- a/_cmd.py +++ b/_cmd.py @@ -197,7 +197,7 @@ def init_home(self): if not COMMAND_ENV.get(ENV.ENV_OBD_ID): COMMAND_ENV.set(ENV.ENV_OBD_ID, uuid()) if VERSION != version: - for part in ['workflows', 'config_parser', 'optimize', 'mirror/remote']: + for part in ['workflows', 'plugins', 'config_parser', 'optimize', 'mirror/remote']: obd_part_dir = os.path.join(self.OBD_PATH, part) if DirectoryUtil.mkdir(self.OBD_PATH): root_part_path = os.path.join(self.OBD_INSTALL_PATH, part) From 6de2b7a7b374d0614e879b6932e716d5a99746c2 Mon Sep 17 00:00:00 2001 From: gaoyunshan Date: Fri, 8 Nov 2024 10:42:11 +0800 Subject: [PATCH 3/5] opti workflow and add Dockerfile --- Docker/Dockerfile | 54 +++++++++++++++++ Docker/ob_build | 76 +++++++++++++++++++++++ Docker/python-env-activate.sh | 6 ++ _workflow.py | 76 +++++++++++++++++++++-- core.py | 110 ++++++++++++++++++++++++---------- rpm/ob-deploy.spec | 8 ++- 6 files changed, 291 insertions(+), 39 deletions(-) create mode 100644 Docker/Dockerfile create mode 100644 Docker/ob_build create mode 100755 Docker/python-env-activate.sh diff --git a/Docker/Dockerfile b/Docker/Dockerfile new file mode 100644 index 00000000..17080665 --- /dev/null +++ b/Docker/Dockerfile @@ -0,0 +1,54 @@ +FROM registry.openanolis.cn/openanolis/anolisos:8.9 +ENV PATH="/opt/miniconda/bin:$PATH" +ENV TZ=UTC-8 +ENV CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 + +ADD Docker/ob_build /usr/bin/ +ADD Docker/python-env-activate.sh /usr/bin/py-env-activate + +RUN yum install -y wget \ + && wget -O /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo \ + && yum -y install libffi-devel bzip2-devel readline readline-devel jq which bison ncurses-devel libaio autoconf automake libtool perl-CPAN gettext-devel perl-devel openssl-devel zlib-devel curl-devel xz-devel \ + expat-devel asciidoc xmlto rpm-build cmake make gcc gcc-c++ python2-devel python3-devel sudo git mysql-devel \ + && yum clean all + +RUN wget https://repo.anaconda.com/miniconda/Miniconda2-py27_4.8.3-Linux-x86_64.sh --no-check-certificate\ + && bash Miniconda2-py27_4.8.3-Linux-x86_64.sh -p /opt/miniconda -b \ + && rm -rf Miniconda2-py27_4.8.3-Linux-x86_64.sh \ + && conda clean --all -y + +COPY requirements3.txt /workspaces/obd/requirements3.txt +COPY service/service-requirements.txt workspaces/obd/service-requirements.txt + +RUN conda create -n py27 -y python=2.7 \ + && source /opt/miniconda/bin/activate base \ + && pip config set global.index-url https://mirrors.aliyun.com/pypi/simple \ + && pip config set global.trusted-host mirrors.aliyun.com \ + && pip install pyinstaller==3.6 \ + && rm -rf ~/.cache/pip \ + && conda clean -a -y + +RUN conda create -n py38 -y python=3.8 \ + && source /opt/miniconda/bin/activate py38 \ + && pip config set global.index-url https://mirrors.aliyun.com/pypi/simple \ + && pip config set global.trusted-host mirrors.aliyun.com \ + && pip install -r 
/workspaces/obd/requirements3.txt \ + && pip install -r /workspaces/obd/service-requirements.txt \ + && pip install pyinstaller==3.6 \ + && pip cache purge \ + && echo "source /opt/miniconda/bin/activate py38" >> ~/.bashrc \ + && /opt/miniconda/bin/conda clean -a -y \ + && rm -rf /workspaces/obd + +# install nodejs and yarn +RUN wget https://rpm.nodesource.com/setup_16.x \ + && touch /etc/redhat-release \ + && bash setup_16.x \ + && rm -f /etc/redhat-release \ + && yum install -y nodejs \ + && yum clean all + +RUN npm install -g yarn \ + && yarn config delete registry --global \ + && yarn config set registry https://registry.npmmirror.com/ --global \ + && npm cache clean --force \ No newline at end of file diff --git a/Docker/ob_build b/Docker/ob_build new file mode 100644 index 00000000..bf35405f --- /dev/null +++ b/Docker/ob_build @@ -0,0 +1,76 @@ +#!/bin/bash +set -x +set -e + +if [ $# -lt 4 ]; then + echo "no enough parameters. Please provide project_name, version and release." + exit 1 +fi + +# Get system release +os_release=$(grep -Po '(?<=^ID=")[^"]*' /etc/os-release || true) + +if [ -z "$os_release" ]; then + os_release=$(grep -Po '^ID=\K[^ ]+' /etc/os-release) +fi + +set +e +source /etc/profile +set -e + +project_dir=$1 +project_name=$2 +version=$3 +release=$4 +rpm_work_dir=${5:-rpm} +ob_build_script=${project_name}-build.sh +ob_build_spec=${project_name}.spec +ob_build_deps=${project_name}.deps +cur_dir=`pwd` +echo "cur dir: $cur_dir" +# check rpm work dir +if [ ! -d "${cur_dir}/${rpm_work_dir}" ]; then + echo "rpm work dir (${rpm_work_dir}) missing! Please create ${rpm_work_dir} in source code dir and place the packaging related files in the ${rpm_work_dir} dir." + exit 1 +fi + +# trigger building +echo "trigger building, current dir: " +pwd +cd $rpm_work_dir +ABS_PATH=`pwd` + +if [[ x"$os_release" == x"alios" && x"$AONE_COMPATIBLE_AUTO_DEP_CREATE" == x"on" ]]; then + if [ -e "$ob_build_deps" ]; then + echo "execute dep_create for alios" + dep_create $ob_build_deps + echo "execute sw for alios" + sw + fi +fi + +if [ -e "$ob_build_script" ]; then + bash $ob_build_script $project_dir $project_name $version $release +elif [ -e "$ob_build_spec" ]; then + if [[ x"$os_release" == x"alios" ]]; then + rpm_create $ob_build_spec -v $version -r $release + else + TOP_DIR=".rpm_create" + RPM_MACROS=$HOME/.rpmmacros + if [ -e $RPM_MACROS ]; then + mv -f $RPM_MACROS $RPM_MACROS.bak + fi + + # prepare rpm build dirs + mkdir -p $TOP_DIR/BUILD + mkdir -p $TOP_DIR/RPMS + mkdir -p $TOP_DIR/SRPMS + + echo "%_topdir $ABS_PATH/$TOP_DIR" > $RPM_MACROS + rpmbuild -bb $ob_build_spec + find $TOP_DIR/ -name "*.rpm" -exec mv {} . 2>/dev/null \; + fi +else + echo "packaging files missing! 
Please provide $ob_build_script or $ob_build_spec" + exit 1 +fi \ No newline at end of file diff --git a/Docker/python-env-activate.sh b/Docker/python-env-activate.sh new file mode 100755 index 00000000..406e1b0c --- /dev/null +++ b/Docker/python-env-activate.sh @@ -0,0 +1,6 @@ +eval "$(conda shell.bash hook)" +if [[ -z $1 ]]; then + echo "Please input proper python env" + exit 1 +fi +conda activate $1 \ No newline at end of file diff --git a/_workflow.py b/_workflow.py index ec1b91ac..73f27da2 100644 --- a/_workflow.py +++ b/_workflow.py @@ -22,13 +22,14 @@ import os import sys +from copy import deepcopy from _manager import Manager from _plugin import ComponentPluginLoader, pyScriptPluginExec, PyScriptPluginLoader, PyScriptPlugin from tool import OrderedDict -class WorkflowsIter: +class WorkflowsIter(object): def __init__(self, workflows): self.workflows = workflows @@ -65,6 +66,9 @@ def __getitem__(self, component_name): self.workflows[component_name] = ComponentWorkflow(self.name, component_name) return self.workflows[component_name] + def __len__(self): + return len(self.workflows) + def __setitem__(self, component_name, component_workflow): if not isinstance(component_workflow, ComponentWorkflow): raise TypeError("%s must be a instance of ComponentWorkflow" % component_workflow.__class__.__name__) @@ -72,11 +76,39 @@ def __setitem__(self, component_name, component_workflow): raise ValueError("%s is not a %s workflow" % (component_workflow, self.name)) self.workflows[component_name] = component_workflow - def __call__(self, dpeloy_config): + def __call__(self, sorted_components): workflows = [ - self[component] for component in dpeloy_config.sorted_components + self[component] for component in sorted_components ] return WorkflowsIter(workflows) + + +class SubWorkflows(object): + + def __init__(self) -> None: + self.workflows = OrderedDict() + + def add(self, workflow): + if not isinstance(workflow, ComponentWorkflow): + raise TypeError("%s must be a instance of ComponentWorkflow" % workflow.__class__.__name__) + if workflow.name not in self.workflows: + self.workflows[workflow.name] = Workflows(workflow.name) + self.workflows[workflow.name][workflow.component_name] = workflow + + def __getitem__(self, workflow_name): + return self.workflows[workflow_name] + + def __iter__(self): + return iter(self.workflows.values()) + + +class SubWorkflowTemplate(object): + + def __init__(self, name, component_name, version=None, kwargs=None): + self.name = name + self.component_name = component_name + self.version = version + self.kwargs = kwargs or {} class PluginTemplate(object): @@ -94,6 +126,11 @@ def __init__(self, name, component_name): self.name = name self.component_name = component_name self.stage = {} + self.sub_workflow = {} + self.global_kwargs = {} + + def set_global_kwargs(self, **kwargs): + self.global_kwargs = kwargs def add(self, stage, *plugins): return self.add_with_kwargs(stage, None, *plugins) @@ -113,14 +150,45 @@ def add_with_component_version_kwargs(self, stage, component_name, version, kwar if stage not in self.stage: self.stage[stage] = plugins else: + if stage in self.sub_workflow: + raise Exception("stage %s already has a workflow" % stage) self.stage[stage] += plugins + def add_workflow(self, stage, workflow): + return self.add_workflow_with_kwargs(stage, None, workflow) + + def add_workflow_with_component(self, stage, component_name, workflow): + return self.add_workflow_with_component_version(stage, component_name, None, workflow) + + def 
add_workflow_with_component_version(self, stage, component_name, version, workflow): + return self.add_workflow_with_component_version_kwargs(stage, component_name, version, None, workflow) + + def add_workflow_with_kwargs(self, stage, kwargs, workflow): + return self.add_workflow_with_component_version_kwargs(stage, self.component_name, None, kwargs, workflow) + + def add_workflow_with_component_version_kwargs(self, stage, component_name, version, kwargs, workflow): + stage = int(stage) + workflow = SubWorkflowTemplate(workflow, component_name, version, kwargs) + if stage not in self.stage: + self.stage[stage] = [workflow] + self.sub_workflow[stage] = workflow + else: + raise Exception("stage %s already has a workflow" % stage) + @property def stages(self): return sorted(self.stage.keys()) def __getitem__(self, stage): - return self.stage.get(stage, []) + if self.global_kwargs: + stages = [] + for template in self.stage.get(stage, []): + template = deepcopy(template) + template.kwargs.update(self.global_kwargs) + stages.append(template) + return stages + else: + return self.stage.get(stage, []) class ComponentWorkflowLoader(ComponentPluginLoader): diff --git a/core.py b/core.py index 680019e8..b90aaa73 100644 --- a/core.py +++ b/core.py @@ -38,7 +38,7 @@ from _mirror import MirrorRepositoryManager, PackageInfo, RemotePackageInfo from _plugin import PluginManager, PluginType, InstallPlugin, PluginContextNamespace from _deploy import DeployManager, DeployStatus, DeployConfig, DeployConfigStatus, Deploy, ClusterStatus -from _workflow import WorkflowManager, Workflows +from _workflow import WorkflowManager, Workflows, SubWorkflowTemplate, SubWorkflows from _tool import Tool, ToolManager from _repository import RepositoryManager, LocalPackage, Repository, RepositoryVO import _errno as err @@ -183,10 +183,12 @@ def get_workflows(self, workflow_name, repositories=None, no_found_act='exit'): repositories = self.repositories workflows = Workflows(workflow_name) for repository in repositories: - workflows[repository.name] = self.get_workflow(repository, workflow_name, repository.name, repository.version, no_found_act=no_found_act) + template = self.get_workflow(repository, workflow_name, repository.name, repository.version, no_found_act=no_found_act) + if template: + workflows[repository.name] = template return workflows - def get_workflow(self, repository, workflow_name, component_name, version=0.1, no_found_act='exit'): + def get_workflow(self, repository, workflow_name, component_name, version=0.1, no_found_act='exit', **component_kwargs): if no_found_act == 'exit': no_found_exit = True else: @@ -195,41 +197,78 @@ def get_workflow(self, repository, workflow_name, component_name, version=0.1, n self._call_stdio('verbose', 'Searching %s template for components ...', workflow_name) template = self.workflow_manager.get_workflow_template(workflow_name, component_name, version) if template: - ret = self.call_workflow_template(template, repository) + ret = self.call_workflow_template(template, repository, **component_kwargs) if ret: self._call_stdio('verbose', 'Found for %s for %s-%s' % (template, template.component_name, template.version)) return ret if no_found_exit: - self._call_stdio('critical', 'No such %s template for %s-%s' % (template, template.component_name, template.version)) + self._call_stdio('critical', 'No such %s template for %s-%s' % (workflow_name, component_name, version)) exit(1) else: - self._call_stdio(msg_lv, 'No such %s template for %s-%s' % (template, template.component_name, 
template.version)) + self._call_stdio(msg_lv, 'No such %s template for %s-%s' % (workflow_name, component_name, version)) - def run_workflow(self, workflows, deploy_config=None, repositories=None, no_found_act='exit'): - if not deploy_config: - deploy_config = self.deploy.deploy_config + def run_workflow(self, workflows, sorted_components=[], repositories=None, no_found_act='exit', error_exit=True, **kwargs): + if not sorted_components and self.deploy: + sorted_components = self.deploy.deploy_config.sorted_components if not repositories: - repositories = self.repositories - repositories = {repository.name: repository for repository in repositories} - for stages in workflows(deploy_config): + repositories = self.repositories if self.repositories else [] + if not sorted_components: + sorted_components = [repository.name for repository in repositories] + + repositories_map = {repository.name: repository for repository in repositories} + for stages in workflows(sorted_components): + if not self.hanlde_sub_workflows(stages, sorted_components, repositories, no_found_act=no_found_act, **kwargs): + return False for component_name in stages: - for plugin_template in stages[component_name]: - if 'repository' in plugin_template.kwargs: - repository = plugin_template.kwargs['repository'] - del plugin_template.kwargs['repository'] - else: - if plugin_template.component_name in repositories: - repository = repositories[plugin_template.component_name] - else: - repository = repositories[component_name] - if not plugin_template.version: - if plugin_template.component_name in repositories: - plugin_template.version = repositories[component_name].version - else: - plugin_template.version = repository.version - plugin = self.search_py_script_plugin_by_template(plugin_template, no_found_act=no_found_act) - if plugin and not self.call_plugin(plugin, repository, **plugin_template.kwargs): - return False + for template in stages[component_name]: + if isinstance(template, SubWorkflowTemplate): + continue + if component_name in kwargs: + template.kwargs.update(kwargs[component_name]) + if not self.run_plugin_template(template, component_name, repositories_map, no_found_act=no_found_act) and error_exit: + return False + return True + + def hanlde_sub_workflows(self, stages, sorted_components, repositories, no_found_act='exit', **kwargs): + sub_workflows = SubWorkflows() + for repository in repositories: + component_name = repository.name + if component_name not in stages: + continue + for template in stages[component_name]: + if not isinstance(template, SubWorkflowTemplate): + continue + if component_name in kwargs: + template.kwargs.update(kwargs[component_name]) + version = template.version if template.version else repository.version + workflow = self.get_workflow(repository, template.name, template.component_name, version, no_found_act=no_found_act, **template.kwargs) + if workflow: + workflow.set_global_kwargs(**template.kwargs) + sub_workflows.add(workflow) + + for workflows in sub_workflows: + if not self.run_workflow(workflows, sorted_components, repositories, no_found_act=no_found_act, **kwargs): + return False + return True + + def run_plugin_template(self, plugin_template, component_name, repositories=None, no_found_act='exit', **kwargs): + if 'repository' in plugin_template.kwargs: + repository = plugin_template.kwargs['repository'] + del plugin_template.kwargs['repository'] + else: + if plugin_template.component_name in repositories: + repository = repositories[plugin_template.component_name] + else: + 
repository = repositories[component_name] + if not plugin_template.version: + if plugin_template.component_name in repositories: + plugin_template.version = repositories[component_name].version + else: + plugin_template.version = repository.version + plugin = self.search_py_script_plugin_by_template(plugin_template, no_found_act=no_found_act) + plugin_template.kwargs.update(kwargs) + if plugin and not self.call_plugin(plugin, repository, **plugin_template.kwargs): + return False return True def _init_call_args(self, repository, spacename=None, target_servers=None, **kwargs): @@ -245,7 +284,14 @@ def _init_call_args(self, repository, spacename=None, target_servers=None, **kwa 'cmd': self.cmds, 'options': self.options, 'stdio': self.stdio, - 'target_servers': target_servers + 'target_servers': target_servers, + 'mirror_manager': self.mirror_manager, + 'repository_manager': self.repository_manager, + 'plugin_manager': self.plugin_manager, + 'deploy_manager': self.deploy_manager, + 'lock_manager': self.lock_manager, + 'optimize_manager': self.optimize_manager, + 'tool_manager': self.tool_manager } if self.deploy: args['deploy_name'] = self.deploy.name @@ -258,9 +304,9 @@ def _init_call_args(self, repository, spacename=None, target_servers=None, **kwa args.update(kwargs) return args - def call_workflow_template(self, workflow_template, repository): + def call_workflow_template(self, workflow_template, repository, spacename=None, target_servers=None, **kwargs): self._call_stdio('verbose', 'Call workflow %s for %s' % (workflow_template, repository)) - args = self._init_call_args(repository, None, None, clients=None) + args = self._init_call_args(repository, spacename, target_servers, clients=None, **kwargs) return workflow_template(**args) def call_plugin(self, plugin, repository, spacename=None, target_servers=None, **kwargs): diff --git a/rpm/ob-deploy.spec b/rpm/ob-deploy.spec index ec77ab06..9f8e4201 100644 --- a/rpm/ob-deploy.spec +++ b/rpm/ob-deploy.spec @@ -70,12 +70,13 @@ mkdir -p ${RPM_BUILD_ROOT}/usr/obd pip install -r plugins-requirements3.txt --target=$BUILD_DIR/SOURCES/site-packages -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com pip install -r service/service-requirements.txt --target=$BUILD_DIR/SOURCES/site-packages -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com # pyinstaller -y --clean -n obd-web -p $BUILD_DIR/SOURCES/site-packages -F service/app.py -pyinstaller --hidden-import=decimal -p $BUILD_DIR/SOURCES/site-packages --hidden-import service/app.py --hidden-import=configparser --hidden-import=Crypto.Hash.SHA --hidden-import=Crypto.PublicKey.RSA --hidden-import=Crypto.Signature.PKCS1_v1_5 --hidden-import=Crypto.Cipher.PKCS1_OAEP -F obd.py +pyinstaller --hidden-import=decimal -p $BUILD_DIR/SOURCES/site-packages --hidden-import service.app --hidden-import=configparser --hidden-import=Crypto.Hash.SHA --hidden-import=Crypto.PublicKey.RSA --hidden-import=Crypto.Signature.PKCS1_v1_5 --hidden-import=Crypto.Cipher.PKCS1_OAEP -F obd.py rm -f obd.py obd.spec \mkdir -p $BUILD_DIR/SOURCES/web \cp -rf $SRC_DIR/dist/obd ${RPM_BUILD_ROOT}/usr/bin/obd \cp -rf $SRC_DIR/web/dist $BUILD_DIR/SOURCES/web \cp -rf $SRC_DIR/plugins $BUILD_DIR/SOURCES/plugins +\cp -rf $SRC_DIR/workflows $BUILD_DIR/SOURCES/workflows \cp -rf $SRC_DIR/optimize $BUILD_DIR/SOURCES/optimize \cp -rf $SRC_DIR/example $BUILD_DIR/SOURCES/example \cp -rf $SRC_DIR/config_parser $BUILD_DIR/SOURCES/config_parser @@ -85,6 +86,7 @@ rm -f obd.py obd.spec \cp -rf $SRC_DIR/mirror/ 
$BUILD_DIR/SOURCES/ \cp -rf $BUILD_DIR/SOURCES/web ${RPM_BUILD_ROOT}/usr/obd/ \cp -rf $BUILD_DIR/SOURCES/plugins ${RPM_BUILD_ROOT}/usr/obd/ +\cp -rf $BUILD_DIR/SOURCES/workflows ${RPM_BUILD_ROOT}/usr/obd/ \cp -rf $BUILD_DIR/SOURCES/optimize ${RPM_BUILD_ROOT}/usr/obd/ \cp -rf $BUILD_DIR/SOURCES/config_parser ${RPM_BUILD_ROOT}/usr/obd/ \cp -rf $BUILD_DIR/SOURCES/mirror ${RPM_BUILD_ROOT}/usr/obd/ @@ -95,8 +97,8 @@ mkdir -p ${RPM_BUILD_ROOT}/usr/obd/lib/ mkdir -p ${RPM_BUILD_ROOT}/usr/obd/lib/executer \cp -rf ${RPM_DIR}/executer27 ${RPM_BUILD_ROOT}/usr/obd/lib/executer/ \cp -rf $BUILD_DIR/SOURCES/example ${RPM_BUILD_ROOT}/usr/obd/ -cd ${RPM_BUILD_ROOT}/usr/obd/plugins && ln -s oceanbase oceanbase-ce && \cp -rf obproxy/* obproxy-ce/ && \cp -rf $SRC_DIR/plugins/obproxy-ce/* obproxy-ce/ -cd ${RPM_BUILD_ROOT}/usr/obd/plugins && ln -sf ocp-server ocp-server-ce +cd ${RPM_BUILD_ROOT}/usr/obd/plugins && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/* obproxy-ce/ && \cp -rf $SRC_DIR/plugins/obproxy-ce/* obproxy-ce/ +cd ${RPM_BUILD_ROOT}/usr/obd/workflows && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/* obproxy-ce/ && \cp -rf $SRC_DIR/workflows/obproxy-ce/* obproxy-ce/ mv obproxy/3.1.0 obproxy/3.2.1 cd ${RPM_BUILD_ROOT}/usr/obd/config_parser && ln -s oceanbase oceanbase-ce cd ${RPM_BUILD_ROOT}/usr/obd/optimize && ln -s obproxy obproxy-ce From c983dfd75496ef99e99193c661ab0cdd8671cfd9 Mon Sep 17 00:00:00 2001 From: gaoyunshan Date: Fri, 8 Nov 2024 11:10:26 +0800 Subject: [PATCH 4/5] fix ob-deploy.spec --- rpm/ob-deploy.spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rpm/ob-deploy.spec b/rpm/ob-deploy.spec index 9f8e4201..3dc12f81 100644 --- a/rpm/ob-deploy.spec +++ b/rpm/ob-deploy.spec @@ -98,7 +98,7 @@ mkdir -p ${RPM_BUILD_ROOT}/usr/obd/lib/executer \cp -rf ${RPM_DIR}/executer27 ${RPM_BUILD_ROOT}/usr/obd/lib/executer/ \cp -rf $BUILD_DIR/SOURCES/example ${RPM_BUILD_ROOT}/usr/obd/ cd ${RPM_BUILD_ROOT}/usr/obd/plugins && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/* obproxy-ce/ && \cp -rf $SRC_DIR/plugins/obproxy-ce/* obproxy-ce/ -cd ${RPM_BUILD_ROOT}/usr/obd/workflows && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/* obproxy-ce/ && \cp -rf $SRC_DIR/workflows/obproxy-ce/* obproxy-ce/ +cd ${RPM_BUILD_ROOT}/usr/obd/workflows && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/ obproxy-ce/ && \cp -rf $SRC_DIR/workflows/obproxy-ce/* obproxy-ce/ mv obproxy/3.1.0 obproxy/3.2.1 cd ${RPM_BUILD_ROOT}/usr/obd/config_parser && ln -s oceanbase oceanbase-ce cd ${RPM_BUILD_ROOT}/usr/obd/optimize && ln -s obproxy obproxy-ce From 3f64d05acf396b9684f5ba70e2e0bec67519d920 Mon Sep 17 00:00:00 2001 From: gaoyunshan Date: Fri, 8 Nov 2024 13:51:05 +0800 Subject: [PATCH 5/5] fix ob-deploy.spec --- rpm/ob-deploy.spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rpm/ob-deploy.spec b/rpm/ob-deploy.spec index 3dc12f81..9b76bef9 100644 --- a/rpm/ob-deploy.spec +++ b/rpm/ob-deploy.spec @@ -98,7 +98,7 @@ mkdir -p ${RPM_BUILD_ROOT}/usr/obd/lib/executer \cp -rf ${RPM_DIR}/executer27 ${RPM_BUILD_ROOT}/usr/obd/lib/executer/ \cp -rf $BUILD_DIR/SOURCES/example ${RPM_BUILD_ROOT}/usr/obd/ cd ${RPM_BUILD_ROOT}/usr/obd/plugins && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/* obproxy-ce/ && \cp -rf $SRC_DIR/plugins/obproxy-ce/* obproxy-ce/ -cd ${RPM_BUILD_ROOT}/usr/obd/workflows && 
ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/ obproxy-ce/ && \cp -rf $SRC_DIR/workflows/obproxy-ce/* obproxy-ce/ +cd ${RPM_BUILD_ROOT}/usr/obd/workflows && ln -s oceanbase oceanbase-ce && ln -sf ocp-server ocp-server-ce && \cp -rf obproxy/ obproxy-ce/ mv obproxy/3.1.0 obproxy/3.2.1 cd ${RPM_BUILD_ROOT}/usr/obd/config_parser && ln -s oceanbase oceanbase-ce cd ${RPM_BUILD_ROOT}/usr/obd/optimize && ln -s obproxy obproxy-ce
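
A minimal usage sketch of the workflow API introduced in _workflow.py above (not part of the patch itself; the component names, the retry_times keyword, and the final print loop are assumed for illustration). It shows how per-component workflows are registered stage by stage, and how calling the Workflows object with an ordered component list yields the stage-by-stage plugin templates that ObdHome.run_workflow consumes.

from __future__ import print_function

import const
from _workflow import Workflows

workflows = Workflows('display')

# Indexing by component name lazily creates a ComponentWorkflow for that component.
ob = workflows['oceanbase-ce']
ob.add(const.STAGE_FIRST, 'status')                                  # component's own plugin
ob.add_with_component(const.STAGE_FIRST, 'general', 'status_check')  # shared 'general' plugin
ob.add(const.STAGE_SECOND, 'connect', 'display')

proxy = workflows['obproxy-ce']
proxy.add(const.STAGE_FIRST, 'status')
# Extra kwargs attached here are forwarded to call_plugin when the stage runs.
proxy.add_with_kwargs(const.STAGE_SECOND, {'retry_times': 30}, 'connect', 'display')

# Iterating walks the union of stage numbers in ascending order; each step maps
# component name -> list of PluginTemplate objects registered for that stage.
for stage_plugins in workflows(['oceanbase-ce', 'obproxy-ce']):
    for component, templates in stage_plugins.items():
        print(component, [template.name for template in templates])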