Publicaciones
2015 |
@inproceedings{Avila2015850,
  title     = {A grid-based tool for optimal performance monitoring of an artificial pancreas},
  author    = {Avila, L. O. and Martínez, E. C.},
  booktitle = {IFMBE Proceedings},
  volume    = {49},
  pages     = {850--853},
  year      = {2015},
  doi       = {10.1007/978-3-319-13117-7_216},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84925263420&partnerID=40&md5=6a2c4e38ad8fc45b3da067117f706b1c},
  abstract  = {Due to its safety-critical condition, continuous performance monitoring of an artificial pancreas (AP) is of paramount importance for both patients and health care personnel. Based on error grid analysis (EGA), a monitoring tool is proposed to assess if a given control policy implementation respects the specification of an optimally controlled glycemic system under uncertainty. The optimal behavior specification is obtained using Linearly Solvable Markov Decision Processes (LSMDP) whereby the Bellman equation is made linear through an exponential transformation such that the optimal control policy is obtained in an explicit form. The system specification is learned using Gaussian processes for state transitions in a well-performing glucose regulator. © Springer International Publishing Switzerland 2015.},
  note      = {cited By 0},
  pubstate  = {published},
  tppubtype = {conference},
}
@inproceedings{Syafiie20155348,
  title         = {Coordinated control of wastewater oxidation processes under constrained incremental control},
  author        = {Syafiie, S. and Tadeo, F. and Martínez, E. C.},
  booktitle     = {2007 European Control Conference, ECC 2007},
  pages         = {5348--5353},
  year          = {2007},
  url           = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84927725578&partnerID=40&md5=dd131d33006df3496ae54b6d899a1cd2},
  abstract      = {This article presents a practical solution to effective control of oxidation processes by coordinating multiple control agents. The proposed methodology of agent coordination is based on Model-Free Learning Control (MFLC), based on Reinforcement Learning (RL) framework. The dynamical system is learnt from the online interaction between agent and the process. From the interaction, the agent provides possible actions, which are satisfying incremental, input-output constraints. The possible actions are defined as a function of symbolic states, which in turn varies with the distance to a goal state. The application on an oxidation process at the laboratory level shows that the proposed MFLC learns to control adequately the process. © 2007 EUCA.},
  note          = {cited By 0},
  internal-note = {NOTE(review): year corrected from 2015 (Scopus indexing date) to 2007 -- the venue is ECC 2007 and the abstract carries "(c) 2007 EUCA"; confirm against the ECC 2007 proceedings record},
  pubstate      = {published},
  tppubtype     = {conference},
}
@article{DePaula20152234,
  title     = {On-line policy learning and adaptation for real-time personalization of an artificial pancreas},
  author    = {De Paula, M. and Acosta, G. G. and Martínez, E. C.},
  journal   = {Expert Systems with Applications},
  volume    = {42},
  number    = {4},
  pages     = {2234--2255},
  year      = {2015},
  doi       = {10.1016/j.eswa.2014.10.038},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84910646834&partnerID=40&md5=224032cbf5204e66e33af9f25b2b43bd},
  abstract  = {The dynamic complexity of the glucose-insulin metabolism in diabetic patients is the main obstacle towards widespread use of an artificial pancreas. The significant level of subject-specific glycemic variability requires continuously adapting the control policy to successfully face daily changes in patient's metabolism and lifestyle. In this paper, an on-line selective reinforcement learning algorithm that enables real-time adaptation of a control policy based on ongoing interactions with the patient so as to tailor the artificial pancreas is proposed. Adaptation includes two online procedures: on-line sparsification and parameter updating of the Gaussian process used to approximate the control policy. With the proposed sparsification method, the support data dictionary for on-line learning is modified by checking if in the arriving data stream there exists novel information to be added to the dictionary in order to personalize the policy. Results obtained in silico experiments demonstrate that on-line policy learning is both safe and efficient for maintaining blood glucose variability within the normoglycemic range. © 2014 Elsevier Ltd. All rights reserved.},
  note      = {cited By 0},
  pubstate  = {published},
  tppubtype = {article},
}
@article{Luna20151613,
  title     = {Run-to-Run Optimization of Biodiesel Production using Probabilistic Tendency Models: A Simulation Study},
  author    = {Luna, M. F. and Martínez, E. C.},
  journal   = {Canadian Journal of Chemical Engineering},
  volume    = {93},
  number    = {9},
  pages     = {1613--1623},
  year      = {2015},
  doi       = {10.1002/cjce.22249},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84939252185&partnerID=40&md5=4884031edbe8a2f0f3f4bbda99316c86},
  abstract  = {Variability of the composition and properties of raw materials used for biodiesel production may cause a loss of productivity, since the same operating conditions give rise to different yields for alternative feedstock sources. The capability to re-optimize the process when the raw materials change may lead to a significant improvement in productivity. For yield optimization, first-principles models of a biodiesel reactor have limited prediction capabilities due to the complex kinetics involving transesterification and saponification reactions, which demands active learning of relevant data through optimal design of experiments. In this work, a Bayesian approach for integrating experimentation with imperfect models is proposed to optimize biodiesel production on a run-to-run basis. Parameter distributions in a probabilistic tendency model for the transesterification of triglycerides are re-estimated using data from a sequence of experiments designed to guide policy improvement. Global sensitivity analysis is used to formulate the optimal sampling strategy in each dynamic experiment as an optimization problem. Results obtained highlight that, even when there are significant errors in the tendency model structure and reduced information content in samples, a significant increase in biodiesel production can be achieved after a handful of runs. © 2015 Canadian Society for Chemical Engineering.},
  note      = {cited By 0},
  pubstate  = {published},
  tppubtype = {article},
}
@article{Avila2015,
  title     = {An active inference approach to on-line agent monitoring in safety-critical systems},
  author    = {Avila, L. and Martínez, E. C.},
  journal   = {Advanced Engineering Informatics},
  year      = {2015},
  doi       = {10.1016/j.aei.2015.07.008},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84939857508&partnerID=40&md5=2ae4e73294ef57c28170e77b9449480c},
  abstract  = {The current trend towards integrating software agents in safety-critical systems such as drones, autonomous cars and medical devices, which must operate in uncertain environments, gives rise to the need of on-line detection of an unexpected behavior. In this work, on-line monitoring is carried out by comparing environmental state transitions with prior beliefs descriptive of optimal behavior. The agent policy is computed analytically using linearly solvable Markov decision processes. Active inference using prior beliefs allows a monitor proactively rehearsing on-line future agent actions over a rolling horizon so as to generate expectations to discover surprising behaviors. A Bayesian surprise metric is proposed based on twin Gaussian processes to measure the difference between prior and posterior beliefs about state transitions in the agent environment. Using a sliding window of sampled data, beliefs are updated a posteriori by comparing a sequence of state transitions with the ones predicted using the optimal policy. An artificial pancreas for diabetic patients is used as a representative example. © 2015 Elsevier Ltd.},
  note      = {cited By 0; Article in Press},
  pubstate  = {published},
  tppubtype = {article},
}
@article{DePaula2015310,
  title     = {Controlling blood glucose variability under uncertainty using reinforcement learning and {Gaussian} processes},
  author    = {De Paula, M. and Ávila, L. O. and Martínez, E. C.},
  journal   = {Applied Soft Computing Journal},
  volume    = {35},
  pages     = {310--332},
  year      = {2015},
  doi       = {10.1016/j.asoc.2015.06.041},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84937399738&partnerID=40&md5=1b44a6fe08ca16aec4097c27d0ad9e2a},
  abstract  = {Automated control of blood glucose (BG) concentration with a fully automated artificial pancreas will certainly improve the quality of life for insulin-dependent patients. Closed-loop insulin delivery is challenging due to inter- and intra-patient variability, errors in glucose sensors and delays in insulin absorption. Responding to the varying activity levels seen in outpatients, with unpredictable and unreported food intake, and providing the necessary personalized control for individuals is a challenging task for existing control algorithms. A novel approach for controlling glycemic variability using simulation-based learning is presented. A policy iteration algorithm that combines reinforcement learning with Gaussian process approximation is proposed. To account for multiple sources of uncertainty, a control policy is learned off-line using an Ito's stochastic model of the glucose-insulin dynamics. For safety and performance, only relevant data are sampled through Bayesian active learning. Results obtained demonstrate that a generic policy is both safe and efficient for controlling subject-specific variability due to a patient's lifestyle and its distinctive metabolic response. © 2015 Elsevier B.V.},
  note      = {cited By 0},
  pubstate  = {published},
  tppubtype = {article},
}
@article{Reynares20152680,
  title     = {A set of ontology design patterns for reengineering {SBVR} statements into {OWL}/{SWRL} ontologies},
  author    = {Reynares, E. and Caliusco, M. L. and Galli, M. R.},
  journal   = {Expert Systems with Applications},
  volume    = {42},
  number    = {5},
  pages     = {2680--2690},
  year      = {2015},
  doi       = {10.1016/j.eswa.2014.11.012},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84919346783&partnerID=40&md5=74ccb9eed5c8005f2c16bd18dac32c1c},
  abstract  = {The interest in the use of ontologies for creating more intelligent and effective enterprise information systems has increased considerably in recent years. The most critical aspects during the development of these systems are: (1) to identify the ontology concepts and (2) to make explicit the business rules by means of the ontology axioms. In order to address these issues, mappings of business rules expressions to ontology statements based on different languages were proposed. Despite the efforts made in this area, some work remain to be done. This work presents a set of ontology design patterns providing a way to obtain an OWL/SWRL ontology by applying metamodel transformation rules over the SBVR specification of a business domain. Patterns are rooted in the structural specification of the standards, providing a set of mappings readily usable for business people or developers concerned with the implementation of a mapping tool. Moreover, translations from SBVR to SWRL language are presented in order to fill the gap in the expressive power of SBVR and OWL. The theoretical expressions of patterns are illustrated by means of an example depicting the core structure of a fictitious company. © 2014 Elsevier Ltd.},
  note      = {cited By 0},
  pubstate  = {published},
  tppubtype = {article},
}
@article{Sosa2015326,
  title     = {Screening of pervaporation membranes with the aid of conceptual models: An application to bioethanol production},
  author    = {Sosa, M. A. and Figueroa Paredes, D. A. and Basílico, J. C. and Van Der Bruggen, B. and Espinosa, J.},
  journal   = {Separation and Purification Technology},
  volume    = {146},
  pages     = {326--341},
  year      = {2015},
  doi       = {10.1016/j.seppur.2015.04.001},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84927729473&partnerID=40&md5=aca444275e50254768e57464027f2fe6},
  abstract  = {In this paper, we assess the performance of a given hydrophobic membrane from the conceptual design of a hybrid process formed by the hydrophobic membrane itself and the separation train located downstream. To this end, a single pervaporation experiment with a model ethanol-water mixture is needed to estimate the minimum area requirement of the hydrophobic membrane. Short-cut methods, on the other hand, can be used to estimate the minimum number of stages and reflux ratio of the distillation column. Estimation of the minimum area requirement for a hydrophilic membrane, which is considered to overcome the azeotropic composition, requires the integration of a spatially one-dimensional isothermal mass transfer model of the unit until the desired biofuel purity is achieved in the corresponding retentate stream. The idea behind the approach is that the performance of a given membrane must be measured taking into account the overall hybrid process given that the hydrophobic membrane itself performs only a part of the desired separation. The hybrid process is then assessed on the basis of a cost estimate using the minimum membrane areas of the two membrane units together with minimum number of stages and minimum reflux ratio of the distillation column among other structural and operating variables. The outcome allows for the screening of pervaporation membranes, and yields valuable insights into the nature of the process as well as the constraints that a hybrid process may face. Membranes can be assessed based on their overall process performance by this method; only the subset of membranes presenting the best economic figures can be considered for a further analysis. © 2015 Elsevier B.V. All rights reserved.},
  note      = {cited By 1},
  pubstate  = {published},
  tppubtype = {article},
}
@article{Laoretani2015135,
  title     = {Recycling vs. reprocessing. Optimization of a gossypol production process},
  author    = {Laoretani, D. S. and Iribarren, O. A.},
  journal   = {Chemical Engineering Research and Design},
  volume    = {100},
  pages     = {135--147},
  year      = {2015},
  doi       = {10.1016/j.cherd.2015.05.017},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84938220035&partnerID=40&md5=8809e950670ea5baa760cef6bce2b77d},
  abstract  = {Three processes to obtain GAA from the soapstock residue of oil refineries were compared in this work. The first process is the originally proposed by Dowd and Pelitire (2001 Ind. Crops Prod. 14, 113) in which the mother liquor from the last crystallization step still contains an appreciable amount of gossypol which is lost as a process residual stream. The second process recycles the mother liquor to the hydrolysis first step of the process, following the heuristic of the traditional process design procedure by Douglas (1988. Conceptual Design of Chemical Processes. McGraw Hill, New York, NY), which increases product yield. While the third process adds a new downstream processing line to reprocess the mother liquor. This last alternative renders a slightly lower product yield than Process 2 but requires a smaller investment cost, exhibiting the best economic performance. The alternative of incorporating a recycle to reprocess unreacted material (in this case the bound gossypol present in the crystallization mother liquor) is the usual approach in traditional process design. However, in this particular study case, it does not succeed in rendering the process alternative with the best economic performance: the recycle stream flow impacts on the equipment sizes, increasing investment cost far beyond the alternative that adds smaller units to reprocess the mother liquor stream. © 2015.},
  note      = {cited By 0},
  pubstate  = {published},
  tppubtype = {article},
}
@article{Fernández201589,
  title     = {Agent-based monitoring service for management of disruptive events in supply chains},
  author    = {Fernández, E. and Toledo, C. M. and Galli, M. R. and Salomone, E. and Chiotti, O. J.},
  journal   = {Computers in Industry},
  volume    = {70},
  pages     = {89--101},
  year      = {2015},
  doi       = {10.1016/j.compind.2015.01.009},
  url       = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84928468924&partnerID=40&md5=17988915ca283ae89b5a32039caf5604},
  abstract  = {Schedules of supply chains are generated with buffers to absorb the effect of disruptive events that could occur during their execution. Schedules can be systematically repaired through specific modifications within buffers by using appropriate decision models that consider the distributed nature of a supply chain. To this aim, information of disruptive events at occurrence or in advance allows decision models to make better decisions. To detect and predict disruptive events along a schedule execution, a service-oriented monitoring subsystem that uses a reference model for defining monitoring models was proposed. This subsystem offers services for collecting execution data of a schedule and environment data, and assessing them to detect/anticipate disruptive events. Because of the distributed nature and the complexity of these services functionalities, this paper presents an agent-based approach for their implementation. This technology allows dealing with supply chain monitoring by structuring monitoring subsystem functionalities as a set of autonomous entities. These entities are able to perform tailored plans created at execution time to concurrently monitor different schedules. A case study is described to try out the implemented prototype system. © 2015 Elsevier B.V. All rights reserved.},
  note      = {cited By 0},
  pubstate  = {published},
  tppubtype = {article},
}
L.J.R. Stroppi, O.J. Chiotti, P.D. Villarreal Defining the resource perspective in the development of processes-aware information systems (Artículo de revista) Information and Software Technology, 59 , pp. 86-108, 2015, (cited By 1). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Stroppi201586, title = {Defining the resource perspective in the development of processes-aware information systems}, author = { L.J.R. Stroppi and O.J. Chiotti and P.D. Villarreal}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84921046811&partnerID=40&md5=10e0dd735e2c6ca77a96fb7dd088b1b7}, doi = {10.1016/j.infsof.2014.10.006}, year = {2015}, date = {2015-01-01}, journal = {Information and Software Technology}, volume = {59}, pages = {86-108}, abstract = {Context The resource perspective has impact on the performance of business processes. However, current Workflow Management Systems (WfMSs) provide disparate support to its implementation and business process modeling languages provide limited capabilities for its definition. Thus, it is difficult to specify requirements regarding this perspective and to select an appropriate WfMS to support them in order to obtain a technological solution aligned with the organizational needs. Objective To provide support to the definition, implementation, verification and validation of resource perspective requirements in the development of Process-Aware Information Systems (PAISs) based on WfMSs. 
Method The following activities were carried out: (i) identification of resource perspective aspects in executable workflow specifications, (ii) analysis of the elements provided by the BPMN modeling language to represent these aspects, (iii) development of a framework based on BPMN for defining and implementing these aspects by using the extension mechanism provided by this language, (iv) development of a model-driven development method that leverages the framework to develop PAISs, and (v) validation of the proposed framework and method through the development of a tool supporting them, a case study, and the evaluation against the Workflow Resource Patterns. Results A framework, a method and a tool that support the definition of the resource perspective in the development of PAISs. Conclusion By using the proposed framework and method, practitioners are able to: define the resource perspective requirements in conceptual process models, select a WfMS as implementation platform, and define the implementation of these requirements maintaining the consistency between the conceptual process models and the workflow specifications. © 2014 Elsevier B.V. All rights reserved.}, note = {cited By 1}, keywords = {}, pubstate = {published}, tppubtype = {article} } Context The resource perspective has impact on the performance of business processes. However, current Workflow Management Systems (WfMSs) provide disparate support to its implementation and business process modeling languages provide limited capabilities for its definition. Thus, it is difficult to specify requirements regarding this perspective and to select an appropriate WfMS to support them in order to obtain a technological solution aligned with the organizational needs. Objective To provide support to the definition, implementation, verification and validation of resource perspective requirements in the development of Process-Aware Information Systems (PAISs) based on WfMSs. 
Method The following activities were carried out: (i) identification of resource perspective aspects in executable workflow specifications, (ii) analysis of the elements provided by the BPMN modeling language to represent these aspects, (iii) development of a framework based on BPMN for defining and implementing these aspects by using the extension mechanism provided by this language, (iv) development of a model-driven development method that leverages the framework to develop PAISs, and (v) validation of the proposed framework and method through the development of a tool supporting them, a case study, and the evaluation against the Workflow Resource Patterns. Results A framework, a method and a tool that support the definition of the resource perspective in the development of PAISs. Conclusion By using the proposed framework and method, practitioners are able to: define the resource perspective requirements in conceptual process models, select a WfMS as implementation platform, and define the implementation of these requirements maintaining the consistency between the conceptual process models and the workflow specifications. © 2014 Elsevier B.V. All rights reserved. |
M.S. Sonzini, M. Vegetti, H.P. Leone Towards an ontology for product version management (Artículo de revista) International Journal of Product Lifecycle Management, 8 (1), pp. 80-97, 2015, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Sonzini201580, title = {Towards an ontology for product version management}, author = { M.S. Sonzini and M. Vegetti and H.P. Leone}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84931270284&partnerID=40&md5=b8ff22ffdee2ae2c498ff889c346cffd}, year = {2015}, date = {2015-01-01}, journal = {International Journal of Product Lifecycle Management}, volume = {8}, number = {1}, pages = {80-97}, abstract = {During its lifecycle, products are affected by market, technology, and user requirements. Without a process for efficiently handling product changes, product data, which is spread in different areas and systems, might become unusable, incomplete, or inconsistent. A simple change on product information may trigger a domino effect that could be very difficult to control. In order to reduce this effect, knowledge is important to answer what, when, why, and how a change occurred. This article proposes an ontology that allows capturing product changes in order to answer the aforementioned questions. The proposed ontology extends PRoductONTOlogy (PRONTO) (Vegetti et al., 2011) to represent product family changes. OWL implementation of the proposed ontology is presented with a simple case study to validate it. Copyright © 2015 Inderscience Enterprises Ltd.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } During its lifecycle, products are affected by market, technology, and user requirements. Without a process for efficiently handling product changes, product data, which is spread in different areas and systems, might become unusable, incomplete, or inconsistent. A simple change on product information may trigger a domino effect that could be very difficult to control. 
In order to reduce this effect, knowledge is important to answer what, when, why, and how a change occurred. This article proposes an ontology that allows capturing product changes in order to answer the aforementioned questions. The proposed ontology extends PRoductONTOlogy (PRONTO) (Vegetti et al., 2011) to represent product family changes. OWL implementation of the proposed ontology is presented with a simple case study to validate it. Copyright © 2015 Inderscience Enterprises Ltd. |
M. Vegetti, G. Henning An Ontological Approach to Integration of Planning and Scheduling Activities in Batch Process Industries (Artículo de revista) Computer Aided Chemical Engineering, 37 , pp. 995-1000, 2015, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Vegetti2015995, title = {An Ontological Approach to Integration of Planning and Scheduling Activities in Batch Process Industries}, author = { M. Vegetti and G. Henning}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84940571215&partnerID=40&md5=bf6f2fc20d6e70c001570bc7dd52edc4}, doi = {10.1016/B978-0-444-63577-8.50011-5}, year = {2015}, date = {2015-01-01}, journal = {Computer Aided Chemical Engineering}, volume = {37}, pages = {995-1000}, abstract = {In the last decades, the integration of informatic applications supporting planning, scheduling and control has been a serious concern of the industrial community. Many standards have been developed to tackle this issue by addressing the exchange of data between the scheduling function and its immediate lower and upper levels in the planning pyramid. However, a more comprehensive approach is required to tackle integration problems, since this matter entails much more than data exchange. So, this article presents an ontological framework that provides the foundations to reach an effective interoperability among the various applications linked to scheduling activities. © 2015 Elsevier B.V.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } In the last decades, the integration of informatic applications supporting planning, scheduling and control has been a serious concern of the industrial community. Many standards have been developed to tackle this issue by addressing the exchange of data between the scheduling function and its immediate lower and upper levels in the planning pyramid. 
However, a more comprehensive approach is required to tackle integration problems, since this matter entails much more than data exchange. So, this article presents an ontological framework that provides the foundations to reach an effective interoperability among the various applications linked to scheduling activities. © 2015 Elsevier B.V. |
M.L. Cunico, A.R. Vecchietti Fuzzy programming model for procurement management under delivery shortage (Artículo de revista) Industrial and Engineering Chemistry Research, 54 (16), pp. 4616-4624, 2015, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Cunico20154616, title = {Fuzzy programming model for procurement management under delivery shortage}, author = { M.L. Cunico and A.R. Vecchietti}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84929494437&partnerID=40&md5=c07860491290cb27e7578a058f79b8d9}, doi = {10.1021/ie504962p}, year = {2015}, date = {2015-01-01}, journal = {Industrial and Engineering Chemistry Research}, volume = {54}, number = {16}, pages = {4616-4624}, abstract = {This paper proposes a FMIP (fuzzy mixed integer program) to model the procurement process of a manufacturing company which contemplates uncertainty in the delivery of raw materials. The focus of this article lies on the use of fuzzy sets to represent the percentages of failure in the delivery of the amount of materials requested and include it in a mathematical model as an evaluation measure in the performance of each supplier. The main objective of the proposed model is to select the most promising suppliers in order to optimize the quantitative and qualitative performance of the company, by maximizing the net present value (NPV) and providing a better customer service, respectively, in relation with the commitment of delivery of the company's suppliers. To solve the problem raised, the FMIP model proposed is transformed into an equivalent MILP (mixed integer linear program), and then, several scenarios are solved. An illustrative example is presented to show the utility of the model. 
© 2015 American Chemical Society.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } This paper proposes a FMIP (fuzzy mixed integer program) to model the procurement process of a manufacturing company which contemplates uncertainty in the delivery of raw materials. The focus of this article lies on the use of fuzzy sets to represent the percentages of failure in the delivery of the amount of materials requested and include it in a mathematical model as an evaluation measure in the performance of each supplier. The main objective of the proposed model is to select the most promising suppliers in order to optimize the quantitative and qualitative performance of the company, by maximizing the net present value (NPV) and providing a better customer service, respectively, in relation with the commitment of delivery of the company's suppliers. To solve the problem raised, the FMIP model proposed is transformed into an equivalent MILP (mixed integer linear program), and then, several scenarios are solved. An illustrative example is presented to show the utility of the model. © 2015 American Chemical Society. |
J. Flores, J.M. Montagna, A.R. Vecchietti Investment planning in energy considering economic and environmental objectives (Artículo de revista) Computers and Chemical Engineering, 72 , pp. 222-232, 2015, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Flores2015222, title = {Investment planning in energy considering economic and environmental objectives}, author = { J. Flores and J.M. Montagna and A.R. Vecchietti}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84908456566&partnerID=40&md5=141e6432aee5d521fae3e597cf16677a}, doi = {10.1016/j.compchemeng.2014.05.006}, year = {2015}, date = {2015-01-01}, journal = {Computers and Chemical Engineering}, volume = {72}, pages = {222-232}, abstract = {This work proposes a linear disjunctive multiperiod optimization model for planning investments in energy sources considering two objectives, one economical (maximization of the net present value), and the other environmental (minimization of greenhouse gas emissions - GHG). The general goal of this approach is to provide an analysis tool for energy decision makers in planning investment considering different scenarios in GHG emanation. The decision variables of the model are the investment needs in money, capacity and time in order to satisfy 100% of the energy market for Argentina in the period 2010-2030. Two models are proposed, the first one considers the total amount of GHG released in the horizon time; and the other contemplates the amount of GHG year by year. Twenty scenarios are evaluated with both models. The results obtained are presented, which show the trade-offs between both objectives. 
© 2014 Elsevier Ltd.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } This work proposes a linear disjunctive multiperiod optimization model for planning investments in energy sources considering two objectives, one economical (maximization of the net present value), and the other environmental (minimization of greenhouse gas emissions - GHG). The general goal of this approach is to provide an analysis tool for energy decision makers in planning investment considering different scenarios in GHG emanation. The decision variables of the model are the investment needs in money, capacity and time in order to satisfy 100% of the energy market for Argentina in the period 2010-2030. Two models are proposed, the first one considers the total amount of GHG released in the horizon time; and the other contemplates the amount of GHG year by year. Twenty scenarios are evaluated with both models. The results obtained are presented, which show the trade-offs between both objectives. © 2014 Elsevier Ltd. |
M.C. Vidoni, A.R. Vecchietti An intelligent agent for ERP's data structure analysis based on ANSI/ISA-95 standard (Artículo de revista) Computers in Industry, 73 , pp. 39-50, 2015, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Vidoni201539, title = {An intelligent agent for ERP's data structure analysis based on ANSI/ISA-95 standard}, author = { M.C. Vidoni and A.R. Vecchietti}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84941992386&partnerID=40&md5=fff52440f07394552ee22498aee9560f}, doi = {10.1016/j.compind.2015.07.011}, year = {2015}, date = {2015-01-01}, journal = {Computers in Industry}, volume = {73}, pages = {39-50}, abstract = {This paper presents an intelligent agent to analyze the ERP's (Enterprise Resource Planning) system data structure and its compliance on the ANSI/ISA-95 standard. The knowledge base of the agent is generated using the manufacturing categories information provided by mentioned standard. The approach proposes an infrastructure of a knowledge-based agent that interacts with the database of an ERP system, in order to classify the information of ERP's database tables according to the standard. Several study cases are evaluated and the results obtained are shown in different graphs. This is a first step to improve the interoperability between an Advanced Planning and Scheduling (APS) system that needs to be integrated with ERP's especially in manufacturing and production companies. © 2015 Elsevier B.V. All rights reserved.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } This paper presents an intelligent agent to analyze the ERP's (Enterprise Resource Planning) system data structure and its compliance on the ANSI/ISA-95 standard. The knowledge base of the agent is generated using the manufacturing categories information provided by mentioned standard. 
The approach proposes an infrastructure of a knowledge-based agent that interacts with the database of an ERP system, in order to classify the information of ERP's database tables according to the standard. Several study cases are evaluated and the results obtained are shown in different graphs. This is a first step to improve the interoperability between an Advanced Planning and Scheduling (APS) system that needs to be integrated with ERP's especially in manufacturing and production companies. © 2015 Elsevier B.V. All rights reserved. |
2014 |
M.F. Luna, E.C. Martínez A Bayesian approach to run-to-run optimization of animal cell bioreactors using probabilistic tendency models (Artículo de revista) Industrial and Engineering Chemistry Research, 53 (44), pp. 17252-17266, 2014, (cited By 1). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Luna201417252, title = {A Bayesian approach to run-to-run optimization of animal cell bioreactors using probabilistic tendency models}, author = { M.F. Luna and E.C. Martínez}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84910066011&partnerID=40&md5=91bc806979ebcdb7739b9288523e50c4}, doi = {10.1021/ie500453e}, year = {2014}, date = {2014-01-01}, journal = {Industrial and Engineering Chemistry Research}, volume = {53}, number = {44}, pages = {17252-17266}, abstract = {Increasing demand for recombinant proteins (including monoclonal antibodies) where time to market is critical could benefit from the use of model-based optimization of cell viability and productivity. Owing to the complexity of metabolic regulation, unstructured models of animal cell cultures typically have built-in errors (structural and parametric uncertainty) which give rise to the need for obtaining relevant data through experimental design in modeling for optimization. A Bayesian optimization strategy which integrates tendency models with iterative policy learning is proposed. Parameter distributions in a probabilistic model of bioreactor performance are re-estimated using data from experiments designed for maximizing information content and productivity. Results obtained highlight that experimental design for run-to-run optimization using a probabilistic tendency model is effective to maximize biomass growth even though significant model uncertainty is present. A hybrid cybernetic model of a myeloma cell culture coconsuming glucose and glutamine is used to simulate data to demonstrate the efficacy of the proposed approach. 
© 2014 American Chemical Society.}, note = {cited By 1}, keywords = {}, pubstate = {published}, tppubtype = {article} } Increasing demand for recombinant proteins (including monoclonal antibodies) where time to market is critical could benefit from the use of model-based optimization of cell viability and productivity. Owing to the complexity of metabolic regulation, unstructured models of animal cell cultures typically have built-in errors (structural and parametric uncertainty) which give rise to the need for obtaining relevant data through experimental design in modeling for optimization. A Bayesian optimization strategy which integrates tendency models with iterative policy learning is proposed. Parameter distributions in a probabilistic model of bioreactor performance are re-estimated using data from experiments designed for maximizing information content and productivity. Results obtained highlight that experimental design for run-to-run optimization using a probabilistic tendency model is effective to maximize biomass growth even though significant model uncertainty is present. A hybrid cybernetic model of a myeloma cell culture coconsuming glucose and glutamine is used to simulate data to demonstrate the efficacy of the proposed approach. © 2014 American Chemical Society. |
L. Avila, E.C. Martínez Behavior monitoring under uncertainty using Bayesian surprise and optimal action selection (Artículo de revista) Expert Systems with Applications, 41 (14), pp. 6327-6345, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Avila20146327, title = {Behavior monitoring under uncertainty using Bayesian surprise and optimal action selection}, author = { L. Avila and E.C. Martínez}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84901470525&partnerID=40&md5=f3a3e55edcf90a574184a1819f15a6da}, doi = {10.1016/j.eswa.2014.04.031}, year = {2014}, date = {2014-01-01}, journal = {Expert Systems with Applications}, volume = {41}, number = {14}, pages = {6327-6345}, abstract = {The increasing trend towards delegating tasks to autonomous artificial agents in safety-critical socio-technical systems makes monitoring an action selection policy of paramount importance. Agent behavior monitoring may profit from a stochastic specification of an optimal policy under uncertainty. A probabilistic monitoring approach is proposed to assess if an agent behavior (or policy) respects its specification. The desired policy is modeled by a prior distribution for state transitions in an optimally-controlled stochastic process. Bayesian surprise is defined as the Kullback-Leibler divergence between the state transition distribution for the observed behavior and the distribution for optimal action selection. To provide a sensitive on-line estimation of Bayesian surprise with small samples twin Gaussian processes are used. Timely detection of a deviant behavior or anomaly in an artificial pancreas highlights the sensitivity of Bayesian surprise to a meaningful discrepancy regarding the stochastic optimal policy when there exist excessive glycemic variability, sensor errors, controller ill-tuning and infusion pump malfunctioning. To reject outliers and leave out redundant information, on-line sparsification of data streams is proposed. 
© 2014 Elsevier Ltd. All rights reserved.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } The increasing trend towards delegating tasks to autonomous artificial agents in safety-critical socio-technical systems makes monitoring an action selection policy of paramount importance. Agent behavior monitoring may profit from a stochastic specification of an optimal policy under uncertainty. A probabilistic monitoring approach is proposed to assess if an agent behavior (or policy) respects its specification. The desired policy is modeled by a prior distribution for state transitions in an optimally-controlled stochastic process. Bayesian surprise is defined as the Kullback-Leibler divergence between the state transition distribution for the observed behavior and the distribution for optimal action selection. To provide a sensitive on-line estimation of Bayesian surprise with small samples twin Gaussian processes are used. Timely detection of a deviant behavior or anomaly in an artificial pancreas highlights the sensitivity of Bayesian surprise to a meaningful discrepancy regarding the stochastic optimal policy when there exist excessive glycemic variability, sensor errors, controller ill-tuning and infusion pump malfunctioning. To reject outliers and leave out redundant information, on-line sparsification of data streams is proposed. © 2014 Elsevier Ltd. All rights reserved. |
Y. Albernas-Carvajal, G. Corsano, M. Morales-Zamora, M. González-Cortés, R. Santos-Herrero, E. González-Suárez Optimal design for an ethanol plant combining first and second-generation technologies (Artículo de revista) CTyF - Ciencia, Tecnología y Futuro, 5 (5), pp. 97-120, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Albernas-Carvajal201497, title = {Optimal design for an ethanol plant combining first and second-generation technologies}, author = { Y. Albernas-Carvajal and G. Corsano and M. Morales-Zamora and M. González-Cortés and R. Santos-Herrero and E. González-Suárez}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84929463082&partnerID=40&md5=ba815b7c8b3d3dd87ab404dca137ce9b}, year = {2014}, date = {2014-01-01}, journal = {CTyF - Ciencia, Tecnología y Futuro}, volume = {5}, number = {5}, pages = {97-120}, abstract = {The synthesis and optimal design of batch plants is addressed in this study. It was applied to the technology of conventional ethanol production in a Cuban distillery using the product of enzymatic hydrolysis of pretreated bagasse as another sugared substrate, starting from laboratory results. The optimal configuration of stages, the number of units in each stage, the unit sizes and minimum total production cost are obtained from the global optimization model and the proposed superstructure. This global model is a mixed integer nonlinear programming (MINLP) formulation, which is represented and resolved by the Professional Software, General Algebraic Modeling System (GAMS) version 23.5 applying DICOPT Solver. Different scenarios are analyzed: attaching pretreatment and enzymatic hydrolysis of bagasse to a conventional distillery plant, selling ethanol, or selling the furfural as by-product if there is a guaranteed market. With this, an actual net present value (VNA) of USD 44'893 358.7 and 1.51 years of Payback Period (PP) are obtained. © 2014, Ecopetrol S.A. 
All rights reserved.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } The synthesis and optimal design of batch plants is addressed in this study. It was applied to the technology of conventional ethanol production in a Cuban distillery using the product of enzymatic hydrolysis of pretreated bagasse as another sugared substrate, starting from laboratory results. The optimal configuration of stages, the number of units in each stage, the unit sizes and minimum total production cost are obtained from the global optimization model and the proposed superstructure. This global model is a mixed integer nonlinear programming (MINLP) formulation, which is represented and resolved by the Professional Software, General Algebraic Modeling System (GAMS) version 23.5 applying DICOPT Solver. Different scenarios are analyzed: attaching pretreatment and enzymatic hydrolysis of bagasse to a conventional distillery plant, selling ethanol, or selling the furfural as by-product if there is a guaranteed market. With this, an actual net present value (VNA) of USD 44'893 358.7 and 1.51 years of Payback Period (PP) are obtained. © 2014, Ecopetrol S.A. All rights reserved. |
Y. Albernas-Carvajal, G. Corsano, V.V. Kafarov, M. González Cortés, E. González Suárez Optimal design of pre-fermentation and fermentation stages applying nonlinear programming (Artículo de revista) Energy Conversion and Management, 87 , pp. 1195-1201, 2014, (cited By 2). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Albernas-Carvajal20141195, title = {Optimal design of pre-fermentation and fermentation stages applying nonlinear programming}, author = { Y. Albernas-Carvajal and G. Corsano and V.V. Kafarov and M. González Cortés and E. González Suárez}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84908699431&partnerID=40&md5=cf8c750c992c71241741ddb0f68d695b}, doi = {10.1016/j.enconman.2014.04.011}, year = {2014}, date = {2014-01-01}, journal = {Energy Conversion and Management}, volume = {87}, pages = {1195-1201}, abstract = {In the present work, the optimal design of pre-fermentation and fermentation operations for ethanol production is obtained developing a superstructure mathematical model. Different configurations of both operations are simultaneously considered in an overall model which also includes detailed kinetics equations. The zero wait is the transfer policy selected for these stages for ensuring the quality of these operations, given the nature and characteristics of microbiological sugary substrates. From the overall proposed model, the optimal configuration of the stages, the number of duplicated units in each stage, the size of each process unit, the process variables as concentrations and flows, and the total investment and production cost are obtained. This model is formulated as a non-linear programming problem, which is solved by the Professional Software, General Algebraic Modeling System (GAMS) with the application of CONOPT solver. The optimal design and operation of pre-fermentation and fermentation stages are obtained and the attained results are compared with the structures in conventional distillery. 
© 2014 Elsevier Ltd.}, note = {cited By 2}, keywords = {}, pubstate = {published}, tppubtype = {article} } In the present work, the optimal design of pre-fermentation and fermentation operations for ethanol production is obtained developing a superstructure mathematical model. Different configurations of both operations are simultaneously considered in an overall model which also includes detailed kinetics equations. The zero wait is the transfer policy selected for these stages for ensuring the quality of these operations, given the nature and characteristics of microbiological sugary substrates. From the overall proposed model, the optimal configuration of the stages, the number of duplicated units in each stage, the size of each process unit, the process variables as concentrations and flows, and the total investment and production cost are obtained. This model is formulated as a non-linear programming problem, which is solved by the Professional Software, General Algebraic Modeling System (GAMS) with the application of CONOPT solver. The optimal design and operation of pre-fermentation and fermentation stages are obtained and the attained results are compared with the structures in conventional distillery. © 2014 Elsevier Ltd. |
E. Reynares, M.L. Caliusco, M.R. Galli Approaching the feasibility of SBVR as modeling language for ontology development: An exploratory experiment (Artículo de revista) Expert Systems with Applications, 41 (4 PART 2), pp. 1576-1583, 2014, (cited By 3). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Reynares20141576, title = {Approaching the feasibility of SBVR as modeling language for ontology development: An exploratory experiment}, author = { E. Reynares and M.L. Caliusco and M.R. Galli}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84888376259&partnerID=40&md5=d0c0b97dec32e5b88a39215fd56963a5}, doi = {10.1016/j.eswa.2013.08.054}, year = {2014}, date = {2014-01-01}, journal = {Expert Systems with Applications}, volume = {41}, number = {4 PART 2}, pages = {1576-1583}, abstract = {Ontology development techniques still constitute an open research area despite its importance in semantic aware information systems. Until now, most methods have used UML in supporting ontology development process. Recent works propose the mapping of business rules expressions to ontology statements as a building technique by means of SBVR language. However, there is still no experimental research comparing such approaches. Aim of this work is to evaluate the feasibility of mapping business domain expressions to ontology statements. An exploratory experiment comparing performance of techniques based on UML and SBVR languages is presented. Comparison is rooted in the quality assessment of the ontologies developed by 10 equally sized groups randomly conformed by 30 undergraduate engineering students and applying such techniques. Developed ontologies largely outperform the minimally acceptable quality, according to the considered quality assessment framework. There is no statistical significant difference between the quality scores of the ontologies developed by means of UML and SBVR techniques, in any of the assessed quality dimensions. 
The feasibility of mapping business domain expressions to ontology statements is shown: ontologies developed by means of a SBVR based approach at least equate the quality of ontologies developed by using an UML based method. Results confirm previous research about the effectiveness of UML approaches for conceptualizing lightweight ontologies while stressing the potential of the SBVR language to express complex notions of a domain of interest. The potential of SBVR to OWL 2 mappings as an ontology development technique worthy of further study is highlighted. © 2013 Elsevier Ltd. All rights reserved.}, note = {cited By 3}, keywords = {}, pubstate = {published}, tppubtype = {article} } Ontology development techniques still constitute an open research area despite its importance in semantic aware information systems. Until now, most methods have used UML in supporting ontology development process. Recent works propose the mapping of business rules expressions to ontology statements as a building technique by means of SBVR language. However, there is still no experimental research comparing such approaches. Aim of this work is to evaluate the feasibility of mapping business domain expressions to ontology statements. An exploratory experiment comparing performance of techniques based on UML and SBVR languages is presented. Comparison is rooted in the quality assessment of the ontologies developed by 10 equally sized groups randomly conformed by 30 undergraduate engineering students and applying such techniques. Developed ontologies largely outperform the minimally acceptable quality, according to the considered quality assessment framework. There is no statistical significant difference between the quality scores of the ontologies developed by means of UML and SBVR techniques, in any of the assessed quality dimensions. 
The feasibility of mapping business domain expressions to ontology statements is shown: ontologies developed by means of a SBVR based approach at least equate the quality of ontologies developed by using an UML based method. Results confirm previous research about the effectiveness of UML approaches for conceptualizing lightweight ontologies while stressing the potential of the SBVR language to express complex notions of a domain of interest. The potential of SBVR to OWL 2 mappings as an ontology development technique worthy of further study is highlighted. © 2013 Elsevier Ltd. All rights reserved. |
E. Reynares, M.L. Caliusco, M.R. Galli Empirical evaluation of business rules mappings for ontology development [Evaluación empírica del mapeo de reglas de negocio para el desarrollo de ontologías] (Artículo de revista) RISTI - Revista Iberica de Sistemas e Tecnologias de Informacao, 2014 (14), pp. 83-99, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Reynares201483, title = {Empirical evaluation of business rules mappings for ontology development [Evaluación empírica del mapeo de reglas de negocio para el desarrollo de ontologías]}, author = { E. Reynares and M.L. Caliusco and M.R. Galli}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84923018018&partnerID=40&md5=e61e15eeae513725e1f38447e145445b}, doi = {10.17013/risti.14.83-99}, year = {2014}, date = {2014-01-01}, journal = {RISTI - Revista Iberica de Sistemas e Tecnologias de Informacao}, volume = {2014}, number = {14}, pages = {83-99}, abstract = {The methodologies for the development of ontologies aimed to be used as software artifacts remain to be an open research area. Although the most widely recognized are rooted on UML given the acceptance of that language in the community of software engineering, none of them are fully mature. Recent works propose the mapping of SBVR expressions to ontology statements as a building technique, but just an empirical study comparing SBVR and UML-rooted techniques has been performed until now. This work analyzes the technical feasibility of SBVR to OWL 2 mappings by depicting a first differentiated replication of the original experiment varying the treatments, the size and composition of the experimental units, and comparing the performance of SBVR and ODM-based techniques. 
The findings allow sustaining the technical feasibility of the SBVR to OWL 2 mappings as an ontology development technique able to be applied by the software engineering community.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } The methodologies for the development of ontologies aimed to be used as software artifacts remain to be an open research area. Although the most widely recognized are rooted on UML given the acceptance of that language in the community of software engineering, none of them are fully mature. Recent works propose the mapping of SBVR expressions to ontology statements as a building technique, but just an empirical study comparing SBVR and UML-rooted techniques has been performed until now. This work analyzes the technical feasibility of SBVR to OWL 2 mappings by depicting a first differentiated replication of the original experiment varying the treatments, the size and composition of the experimental units, and comparing the performance of SBVR and ODM-based techniques. The findings allow sustaining the technical feasibility of the SBVR to OWL 2 mappings as an ontology development technique able to be applied by the software engineering community. |
A.C. Tolaba, M.L. Caliusco, M.R. Galli A knowledge representation of geographic information using an ontologies-based approach [Representación del conocimiento de la información geográfica siguiendo un enfoque basado en ontologías] (Artículo de revista) RISTI - Revista Iberica de Sistemas e Tecnologias de Informacao, 2014 (14), pp. 101-116, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Tolaba2014101, title = {A knowledge representation of geographic information using an ontologies-based approach [Representación del conocimiento de la información geográfica siguiendo un enfoque basado en ontologías]}, author = { A.C. Tolaba and M.L. Caliusco and M.R. Galli}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84922993480&partnerID=40&md5=7f7da3ad29337de264b66fd9cd067149}, doi = {10.17013/risti.14.101-116}, year = {2014}, date = {2014-01-01}, journal = {RISTI - Revista Iberica de Sistemas e Tecnologias de Informacao}, volume = {2014}, number = {14}, pages = {101-116}, abstract = {Nowadays, the use of Geographic Information is inherent in many application domains in various disciplines. Given the potential of geographic information, it is an important tool to facilitate decision making and resource management. There are different standards for representation through conceptual models of geographic information. However, these have limitation for both incorporate semantic expressiveness, since only considered the technical aspect, to properly model applications that use geographic information, because they do not consider all the concepts involved in the geospatial domain, their characteristics and relationships. 
This paper presents a meta-ontology that creates rich semantic models able to represent and make inferences about the knowledge of geographic information.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } Nowadays, the use of Geographic Information is inherent in many application domains in various disciplines. Given the potential of geographic information, it is an important tool to facilitate decision making and resource management. There are different standards for representation through conceptual models of geographic information. However, these have limitation for both incorporate semantic expressiveness, since only considered the technical aspect, to properly model applications that use geographic information, because they do not consider all the concepts involved in the geospatial domain, their characteristics and relationships. This paper presents a meta-ontology that creates rich semantic models able to represent and make inferences about the knowledge of geographic information. |
C.D. Fischer, O.A. Iribarren Hydrogen recovery from the purge stream of a cyclohexane production process using a mass exchange heuristic (Artículo de revista) International Journal of Hydrogen Energy, 39 (35), pp. 20094-20104, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Fischer201420094, title = {Hydrogen recovery from the purge stream of a cyclohexane production process using a mass exchange heuristic}, author = { C.D. Fischer and O.A. Iribarren}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84912569318&partnerID=40&md5=c7386a0c53e3e0c265c2f0985ff545aa}, doi = {10.1016/j.ijhydene.2014.10.042}, year = {2014}, date = {2014-01-01}, journal = {International Journal of Hydrogen Energy}, volume = {39}, number = {35}, pages = {20094-20104}, abstract = {In this paper, we propose a novel hydrogen recovery structure in a cyclohexane production process, arrived at by following a mass exchange heuristic developed in previous works [1,2]. In the cyclohexane production process considered, we explore the effect of process design variables and find the optimal design for a mass exchange between the purge stream and the feed of benzene to the process. We compare our results with a conventional process design lacking hydrogen recovery from the purge, and with a process design which implements a conventional membrane recovery system at the optimum setup of the decision variables. The process with recovery through mass exchange here proposed obtained a respectable 7.12% reduction of hydrogen consumption, resulting in an increase of the plant Net Annual Income of a 4.24% respect to the conventional process design without recovery. These figures are similar to the ones that result from implementing a process design with a conventional membrane recovery system. These results highlight the importance of considering a mass exchange between the process feed and purge stream as an alternative design of the recovery system. 
© 2014 Hydrogen Energy Publications, LLC. All rights reserved.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } In this paper, we propose a novel hydrogen recovery structure in a cyclohexane production process, arrived at by following a mass exchange heuristic developed in previous works [1,2]. In the cyclohexane production process considered, we explore the effect of process design variables and find the optimal design for a mass exchange between the purge stream and the feed of benzene to the process. We compare our results with a conventional process design lacking hydrogen recovery from the purge, and with a process design which implements a conventional membrane recovery system at the optimum setup of the decision variables. The process with recovery through mass exchange here proposed obtained a respectable 7.12% reduction of hydrogen consumption, resulting in an increase of the plant Net Annual Income of a 4.24% respect to the conventional process design without recovery. These figures are similar to the ones that result from implementing a process design with a conventional membrane recovery system. These results highlight the importance of considering a mass exchange between the process feed and purge stream as an alternative design of the recovery system. © 2014 Hydrogen Energy Publications, LLC. All rights reserved. |
D.S. Laoretani, O.A. Iribarren Procedure for the selection among technologies. Treatment of deodorizer distillate oil (Artículo de revista) Industrial and Engineering Chemistry Research, 53 (43), pp. 16803-16812, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Laoretani201416803, title = {Procedure for the selection among technologies. Treatment of deodorizer distillate oil}, author = { D.S. Laoretani and O.A. Iribarren}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84908577128&partnerID=40&md5=b5da4e1f76dd9ae9dd3c18a0b0157154}, doi = {10.1021/ie500211u}, year = {2014}, date = {2014-01-01}, journal = {Industrial and Engineering Chemistry Research}, volume = {53}, number = {43}, pages = {16803-16812}, abstract = {With the aim of both adding value by recovering tocopherols from a natural source and promoting environmental care, this work studies how to select among available technological alternatives for the processing of deodorizer distillate oil (DDO), which is a residue of the edible oils refining industry. The work focuses on how to generate a first set of promising alternatives (we propose to follow an established process design procedure based on heuristics, combined with a screening of the literature, with criteria to narrow the large number of alternatives published). The final selection among them is an established approach: we propose to implement a multiobjective optimization mixed integer linear program maximizing the net present value (NPV) and minimizing the generation of greenhouse gases measured as kilogram-equivalent of CO2. For a given case study of soybean DDO the first step generated a set of six technologies for the treatment of DDO with different processing capacities plus two additional alternatives for the final destination of DDO. The Pareto set of solutions constructed with the results provides information to adopt a both economic and environmentally sound choice of a processing technology. 
For the particular case analyzed, the technology that maximizes NPV within the Pareto set of solutions was esterification of free fatty acids with ethanol in acid medium followed by a separation of the esters by molecular distillation, at the largest production capacity (576,000 kg/year). This technology gives the maximum NPV of $19,574,000 generating 5,142,500 kg of CO2-equiv. The results obtained are useful for decision making in the industry, to give an adequate final destination to the residue DDO. © 2014 American Chemical Society.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } With the aim of both adding value by recovering tocopherols from a natural source and promoting environmental care, this work studies how to select among available technological alternatives for the processing of deodorizer distillate oil (DDO), which is a residue of the edible oils refining industry. The work focuses on how to generate a first set of promising alternatives (we propose to follow an established process design procedure based on heuristics, combined with a screening of the literature, with criteria to narrow the large number of alternatives published). The final selection among them is an established approach: we propose to implement a multiobjective optimization mixed integer linear program maximizing the net present value (NPV) and minimizing the generation of greenhouse gases measured as kilogram-equivalent of CO2. For a given case study of soybean DDO the first step generated a set of six technologies for the treatment of DDO with different processing capacities plus two additional alternatives for the final destination of DDO. The Pareto set of solutions constructed with the results provides information to adopt a both economic and environmentally sound choice of a processing technology. 
For the particular case analyzed, the technology that maximizes NPV within the Pareto set of solutions was esterification of free fatty acids with ethanol in acid medium followed by a separation of the esters by molecular distillation, at the largest production capacity (576,000 kg/year). This technology gives the maximum NPV of $19,574,000 generating 5,142,500 kg of CO2-equiv. The results obtained are useful for decision making in the industry, to give an adequate final destination to the residue DDO. © 2014 American Chemical Society. |
M. Rico, M.L. Caliusco, O.J. Chiotti, M.R. Galli OntoQualitas: A framework for ontology quality assessment in information interchanges between heterogeneous systems (Artículo de revista) Computers in Industry, 2014, (cited By 0; Article in Press). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Rico2014, title = {OntoQualitas: A framework for ontology quality assessment in information interchanges between heterogeneous systems}, author = { M. Rico and M.L. Caliusco and O.J. Chiotti and M.R. Galli}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84906803934&partnerID=40&md5=db8647636b75ca5fc517b71f594edc21}, doi = {10.1016/j.compind.2014.07.010}, year = {2014}, date = {2014-01-01}, journal = {Computers in Industry}, abstract = {Nowadays, Internet technologies and standards are being systematically used by enterprises as tools to provide an infrastructure to connect people, enterprises, and applications they are using. In such complex networked enterprises, it is increasingly challenging to interchange, share, and manage internal and external digital information. In this context, to achieve interoperability between information systems is a challenging task. In order to solve the interoperability problem at semantic level, several ontology-based approaches have emerged. Although methodologies, methods, techniques, and tools to support the ontology building process were proposed, there are no mature models to measure this process, and the quality of implemented ontologies remains a major concern. This paper presents a framework, OntoQualitas, for evaluating the quality of an ontology whose purpose is the information interchange between different contexts. OntoQualitas includes previous and new measures to evaluate the ontology considering its specific purpose. Additionally, an empirical validation of OntoQualitas is presented. © 2014 Elsevier B.V. 
All rights reserved.}, note = {cited By 0; Article in Press}, keywords = {}, pubstate = {published}, tppubtype = {article} } Nowadays, Internet technologies and standards are being systematically used by enterprises as tools to provide an infrastructure to connect people, enterprises, and applications they are using. In such complex networked enterprises, it is increasingly challenging to interchange, share, and manage internal and external digital information. In this context, to achieve interoperability between information systems is a challenging task. In order to solve the interoperability problem at semantic level, several ontology-based approaches have emerged. Although methodologies, methods, techniques, and tools to support the ontology building process were proposed, there are no mature models to measure this process, and the quality of implemented ontologies remains a major concern. This paper presents a framework, OntoQualitas, for evaluating the quality of an ontology whose purpose is the information interchange between different contexts. OntoQualitas includes previous and new measures to evaluate the ontology considering its specific purpose. Additionally, an empirical validation of OntoQualitas is presented. © 2014 Elsevier B.V. All rights reserved. |
E.T. Leal, O.J. Chiotti, P.D. Villarreal Software agents for management dynamic inter-organizational collaborations (Artículo de revista) IEEE Latin America Transactions, 12 (2), pp. 330-341, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Leal2014330, title = {Software agents for management dynamic inter-organizational collaborations}, author = { E.T. Leal and O.J. Chiotti and P.D. Villarreal}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84900601900&partnerID=40&md5=3f718beb4537e5a111d5dfe38f775504}, doi = {10.1109/TLA.2014.6749556}, year = {2014}, date = {2014-01-01}, journal = {IEEE Latin America Transactions}, volume = {12}, number = {2}, pages = {330-341}, abstract = {The globalization, modern markets, as well as new organizational management philosophies and advances in Information and Communications Technologies, encourage organizations to establish collaboration networks or inter-organizational collaborations. In this paper we propose a technology solution based on software agents which allows supporting the management of collaborative business processes in environments dynamic inter-organizational collaborations. First, we propose a software agent platform that integrates in agent specification's the notions of Belief-Desire-Intention agent architecture with functionalities of process-aware information systems. The platform enables organizations to negotiate collaborations agreements in electronic format to establish dynamic inter-organizational collaborations and define the collaborative processes to be executed. Second, we propose a methodology that includes methods based on Model-Driven Development, which enable the generation of executable process models and the code of process-oriented agents, derived from conceptual models of collaborative processes. 
This methodology and methods are implemented and automated by software agents that enable the generations of these implementation artifacts, at run-time of the platform. Therefore, the platform enables the automatic generation of the technology solution that requires each organization to execute the agreed collaborative processes, where the generated artifacts are built and initialized in the platform, allowing the implementation and execution of these processes. In this way, the proposed agent-based platform allows to establish collaboration among heterogeneous and autonomous organizations focusing in the process-oriented integration. © 2003-2012 IEEE.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } The globalization, modern markets, as well as new organizational management philosophies and advances in Information and Communications Technologies, encourage organizations to establish collaboration networks or inter-organizational collaborations. In this paper we propose a technology solution based on software agents which allows supporting the management of collaborative business processes in environments dynamic inter-organizational collaborations. First, we propose a software agent platform that integrates in agent specification's the notions of Belief-Desire-Intention agent architecture with functionalities of process-aware information systems. The platform enables organizations to negotiate collaborations agreements in electronic format to establish dynamic inter-organizational collaborations and define the collaborative processes to be executed. Second, we propose a methodology that includes methods based on Model-Driven Development, which enable the generation of executable process models and the code of process-oriented agents, derived from conceptual models of collaborative processes. 
This methodology and methods are implemented and automated by software agents that enable the generation of these implementation artifacts, at run-time of the platform. Therefore, the platform enables the automatic generation of the technology solution that requires each organization to execute the agreed collaborative processes, where the generated artifacts are built and initialized in the platform, allowing the implementation and execution of these processes. In this way, the proposed agent-based platform allows to establish collaboration among heterogeneous and autonomous organizations focusing in the process-oriented integration. © 2003-2012 IEEE. |
E. Tello-Leal, O.J. Chiotti, P.D. Villarreal Software agent architecture for managing inter-organizational collaborations (Artículo de revista) Journal of Applied Research and Technology, 12 (3), pp. 514-526, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Tello-Leal2014514, title = {Software agent architecture for managing inter-organizational collaborations}, author = { E. Tello-Leal and O.J. Chiotti and P.D. Villarreal}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84919640019&partnerID=40&md5=14f34bbd6467676f916cbd4e9b2e678d}, year = {2014}, date = {2014-01-01}, journal = {Journal of Applied Research and Technology}, volume = {12}, number = {3}, pages = {514-526}, abstract = {The growing importance of cooperation among organizations, as a result of globalization, current market opportunities and technological advances, encourages organizations to dynamically establish inter-organizational collaborations. These collaborations are carried out by executing collaborative business processes among the organizations. In this work we propose an agent-based software architecture for managing inter-organizational collaborations. Two types of agents are provided: the Collaboration Administrator Agent and the Process Administrator Agent. The former allows organizations setting up collaborations. The latter allows organizations executing collaborative business processes. A Colored Petri Net model specifying the role, which an organization fulfills in a collaborative process, is used to carry out the behavior of the Process Administrator Agent that represents the organization. Planning and execution of the actions of the Process Administrator Agents are driven by a Colored Petri Net machine embedded to them. Thus, Process Administrator Agents do not require to have defined at design-time the protocols they can support. 
In addition, we propose a model-driven development method for generating Colored Petri Net models from a collaborative process model defined as interaction protocol. Finally, an implementation of the agent-based software architecture and methods based on model-driven development are presented.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } The growing importance of cooperation among organizations, as a result of globalization, current market opportunities and technological advances, encourages organizations to dynamically establish inter-organizational collaborations. These collaborations are carried out by executing collaborative business processes among the organizations. In this work we propose an agent-based software architecture for managing inter-organizational collaborations. Two types of agents are provided: the Collaboration Administrator Agent and the Process Administrator Agent. The former allows organizations setting up collaborations. The latter allows organizations executing collaborative business processes. A Colored Petri Net model specifying the role, which an organization fulfills in a collaborative process, is used to carry out the behavior of the Process Administrator Agent that represents the organization. Planning and execution of the actions of the Process Administrator Agents are driven by a Colored Petri Net machine embedded to them. Thus, Process Administrator Agents do not require to have defined at design-time the protocols they can support. In addition, we propose a model-driven development method for generating Colored Petri Net models from a collaborative process model defined as interaction protocol. Finally, an implementation of the agent-based software architecture and methods based on model-driven development are presented. |
M.A. Ale, C.M. Toledo, O.J. Chiotti, M.R. Galli A conceptual model and technological support for organizational knowledge management (Artículo de revista) Science of Computer Programming, 95 (P1), pp. 73-92, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Ale201473, title = {A conceptual model and technological support for organizational knowledge management}, author = { M.A. Ale and C.M. Toledo and O.J. Chiotti and M.R. Galli}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84922237581&partnerID=40&md5=ab94dfbf48e7e8ade689e710012aacab}, doi = {10.1016/j.scico.2013.12.012}, year = {2014}, date = {2014-01-01}, journal = {Science of Computer Programming}, volume = {95}, number = {P1}, pages = {73-92}, abstract = {Knowledge Management (KM) models proposed in the literature do not take into account all necessary aspects for effective knowledge management. First, to address this issue, this paper presents a set of requirements that any KM model or initiative should take into account to cover all aspects implied in knowing processes. These requirements were identified through a critical and evolutionary analysis of KM. Second; the paper presents a new distributed KM Conceptual Model whose building blocks are the knowledge activities involved in knowing processes. These activities are: knowledge creation, knowledge sharing, and knowledge representation and retrieval. This model provides a holistic view of KM whose purpose is helping managers understand the scope of this initiative, and supplying a guide for research and implementation in organizations. In this sense, the model presents KM as a highly social rather than technological process. Third; the paper briefly describes an architecture to provide a technological support for knowledge representation and retrieval activities of the proposed KM Conceptual Model. 
This architecture allows implementing a distributed organizational memory that helps to represent the knowledge context through an ontological model, providing a local perspective of each knowledge domain within the organization. Strategies for knowledge annotation, knowledge retrieval, and ontology evolution are briefly described and results of preliminary performance analysis are shown. Finally; based on the available literature, a comparative analysis of different KM models shows their adequacy for previously presented requirements. © 2014 Elsevier B.V. All rights reserved.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } Knowledge Management (KM) models proposed in the literature do not take into account all necessary aspects for effective knowledge management. First, to address this issue, this paper presents a set of requirements that any KM model or initiative should take into account to cover all aspects implied in knowing processes. These requirements were identified through a critical and evolutionary analysis of KM. Second; the paper presents a new distributed KM Conceptual Model whose building blocks are the knowledge activities involved in knowing processes. These activities are: knowledge creation, knowledge sharing, and knowledge representation and retrieval. This model provides a holistic view of KM whose purpose is helping managers understand the scope of this initiative, and supplying a guide for research and implementation in organizations. In this sense, the model presents KM as a highly social rather than technological process. Third; the paper briefly describes an architecture to provide a technological support for knowledge representation and retrieval activities of the proposed KM Conceptual Model. 
This architecture allows implementing a distributed organizational memory that helps to represent the knowledge context through an ontological model, providing a local perspective of each knowledge domain within the organization. Strategies for knowledge annotation, knowledge retrieval, and ontology evolution are briefly described and results of preliminary performance analysis are shown. Finally; based on the available literature, a comparative analysis of different KM models shows their adequacy for previously presented requirements. © 2014 Elsevier B.V. All rights reserved. |
L. Obrst, M. Gruninger, K. Baclawski, M. Bennett, D. Brickley, G. Berg-Cross, P. Hitzler, K. Janowicz, C. Kapp, O. Kutz, C. Lange, A. Levenchuk, F. Quattri, A. Rector, T. Schneider, S. Spero, A. Thessen, M. Vegetti, A. Vizedom, A. Westerinen, M. West, P. Yim Semantic web and big data meets applied ontology (Artículo de revista) Applied Ontology, 9 (2), pp. 155-170, 2014, (cited By 1). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Obrst2014155, title = {Semantic web and big data meets applied ontology}, author = { L. Obrst and M. Gruninger and K. Baclawski and M. Bennett and D. Brickley and G. Berg-Cross and P. Hitzler and K. Janowicz and C. Kapp and O. Kutz and C. Lange and A. Levenchuk and F. Quattri and A. Rector and T. Schneider and S. Spero and A. Thessen and M. Vegetti and A. Vizedom and A. Westerinen and M. West and P. Yim}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84933051332&partnerID=40&md5=a680afbc8380615bf6d4be2c36cbad40}, doi = {10.3233/AO-140135}, year = {2014}, date = {2014-01-01}, journal = {Applied Ontology}, volume = {9}, number = {2}, pages = {155-170}, abstract = {The role that ontologies play or can play in designing and employing semantic technologies has been widely acknowledged by the SemanticWeb and Linked Data communities. But the level of collaboration between these communities and the Applied Ontology community has been much less than expected. Also, ontologies and ontological techniques appear to be of marginalized use in Big Data and its applications. 
To understand this situation and foster greater collaboration, Ontology Summit 2014 brought together representatives from the Semantic Web, Linked Data, Big Data and Applied Ontology communities, to address three basic problems involving applied ontology and these communities: © 2014-IOS Press and the author.}, note = {cited By 1}, keywords = {}, pubstate = {published}, tppubtype = {article} } The role that ontologies play or can play in designing and employing semantic technologies has been widely acknowledged by the Semantic Web and Linked Data communities. But the level of collaboration between these communities and the Applied Ontology community has been much less than expected. Also, ontologies and ontological techniques appear to be of marginalized use in Big Data and its applications. To understand this situation and foster greater collaboration, Ontology Summit 2014 brought together representatives from the Semantic Web, Linked Data, Big Data and Applied Ontology communities, to address three basic problems involving applied ontology and these communities: © 2014-IOS Press and the author. |
M. Vegetti, G. Henning ISA-88 formalization. A step towards its integration with the ISA-95 standard (Conferencia) 1333 , 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @conference{Vegetti2014, title = {ISA-88 formalization. A step towards its integration with the ISA-95 standard}, author = { M. Vegetti and G. Henning}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84924385931&partnerID=40&md5=3ddd96d8a80ce1caf6ed7f47b689a637}, year = {2014}, date = {2014-01-01}, journal = {CEUR Workshop Proceedings}, volume = {1333}, abstract = {ANSI/ISA-88 and ANSI/ISA-95 are two well accepted standards in the industrial domain that provide a set of models considered as best engineering practices for industrial information systems in charge of manufacturing execution and business logistics. The main goal of ANSI/ISA-88 is the control of batch processes, whereas the one of the ANSI/ISA-95 standard is the development of an automated interface between enterprise and control systems. In consequence, both standards should interoperate. However, there are gaps and overlappings between their corresponding terminologies. Moreover, there are additional problems, such as semantic inconsistencies within each of the standards, as well as the use of an informal graphical representations in one of the ANSI/ISA-88 models. This work presents an ontological approach that aims at formalizing the ISA-88 standard as a first step towards its integration with a formal representation of the ANSI/ISA-95 one. Additionally, methodological aspects of the ontology development process are presented. Copyright © by the paper's authors.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {conference} } ANSI/ISA-88 and ANSI/ISA-95 are two well accepted standards in the industrial domain that provide a set of models considered as best engineering practices for industrial information systems in charge of manufacturing execution and business logistics. 
The main goal of ANSI/ISA-88 is the control of batch processes, whereas the one of the ANSI/ISA-95 standard is the development of an automated interface between enterprise and control systems. In consequence, both standards should interoperate. However, there are gaps and overlappings between their corresponding terminologies. Moreover, there are additional problems, such as semantic inconsistencies within each of the standards, as well as the use of informal graphical representations in one of the ANSI/ISA-88 models. This work presents an ontological approach that aims at formalizing the ISA-88 standard as a first step towards its integration with a formal representation of the ANSI/ISA-95 one. Additionally, methodological aspects of the ontology development process are presented. Copyright © by the paper's authors. |
J. Flores, J.M. Montagna, A.R. Vecchietti An optimization approach for long term investments planning in energy (Artículo de revista) Applied Energy, 122 , pp. 162-178, 2014, (cited By 4). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Flores2014162, title = {An optimization approach for long term investments planning in energy}, author = { J. Flores and J.M. Montagna and A.R. Vecchietti}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84896885134&partnerID=40&md5=9e04f60082296cc2c19de9c1757ba299}, doi = {10.1016/j.apenergy.2014.02.002}, year = {2014}, date = {2014-01-01}, journal = {Applied Energy}, volume = {122}, pages = {162-178}, abstract = {This paper presents a mathematical programming model for planning investment in energy sources. The problem formulation considers the use of renewable and not renewable sources and demands, revenues, operation, start-up, and amortization costs of new energy facilities and the amount of reserves of fossil fuels. The objective is the maximization of the Net Present Value (NPV) in the time horizon. The results provide the visualization of the investments made: time periods in and their amounts and also how the energy matrix is affected by those investments. In particular the model was applied to Argentina. The most important feature of the model is the ability to assess and to plan the evolution of the energetic matrix at different circumstances. © 2014 Elsevier Ltd.}, note = {cited By 4}, keywords = {}, pubstate = {published}, tppubtype = {article} } This paper presents a mathematical programming model for planning investment in energy sources. The problem formulation considers the use of renewable and not renewable sources and demands, revenues, operation, start-up, and amortization costs of new energy facilities and the amount of reserves of fossil fuels. The objective is the maximization of the Net Present Value (NPV) in the time horizon. 
The results provide the visualization of the investments made: time periods and their amounts and also how the energy matrix is affected by those investments. In particular the model was applied to Argentina. The most important feature of the model is the ability to assess and to plan the evolution of the energetic matrix under different circumstances. © 2014 Elsevier Ltd. |
M.A. Rodriguez, A.R. Vecchietti, I. Harjunkoski, I.E. Grossmann Optimal supply chain design and management over a multi-period horizon under demand uncertainty. Part I: MINLP and MILP models (Artículo de revista) Computers and Chemical Engineering, 62 , pp. 194-210, 2014, (cited By 7). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Rodriguez2014194, title = {Optimal supply chain design and management over a multi-period horizon under demand uncertainty. Part I: MINLP and MILP models}, author = { M.A. Rodriguez and A.R. Vecchietti and I. Harjunkoski and I.E. Grossmann}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84894900217&partnerID=40&md5=7cae538f74e7240d314dea250e7e86dd}, doi = {10.1016/j.compchemeng.2013.10.007}, year = {2014}, date = {2014-01-01}, journal = {Computers and Chemical Engineering}, volume = {62}, pages = {194-210}, abstract = {An optimization model is proposed to redesign the supply chain of spare part delivery under demand uncertainty from strategic and tactical perspectives in a planning horizon consisting of multiple periods. Long term decisions involve new installations, expansions and elimination of warehouses and factories handling multiple products. It is also considered which warehouses should be used as repair work-shops in order to store, repair and deliver used units to customers. Tactical planning includes deciding inventory levels (safety stock and expected inventory) for each type of spare part in distribution centers and customer plants, as well as the connection links between the supply chain nodes. Capacity constraints are also taken into account when planning inventory levels. At the tactical level it is determined how demand of failing units is satisfied, and whether to use new or used parts. The uncertain demand is addressed by defining the optimal amount of safety stock that guarantees certain service level at a customer plant. 
In addition, the risk-pooling effect is taken into account when defining inventory levels in distribution centers and customer zones. Due to the nonlinear nature of the original formulation, a piece-wise linearization approach is applied to obtain a tight lower bound of the optimal solution. The formulation can be adapted to several industry-critical units and the supply chain of electric motors is provided here as an example. © 2013 Elsevier Ltd.}, note = {cited By 7}, keywords = {}, pubstate = {published}, tppubtype = {article} } An optimization model is proposed to redesign the supply chain of spare part delivery under demand uncertainty from strategic and tactical perspectives in a planning horizon consisting of multiple periods. Long term decisions involve new installations, expansions and elimination of warehouses and factories handling multiple products. It is also considered which warehouses should be used as repair work-shops in order to store, repair and deliver used units to customers. Tactical planning includes deciding inventory levels (safety stock and expected inventory) for each type of spare part in distribution centers and customer plants, as well as the connection links between the supply chain nodes. Capacity constraints are also taken into account when planning inventory levels. At the tactical level it is determined how demand of failing units is satisfied, and whether to use new or used parts. The uncertain demand is addressed by defining the optimal amount of safety stock that guarantees certain service level at a customer plant. In addition, the risk-pooling effect is taken into account when defining inventory levels in distribution centers and customer zones. Due to the nonlinear nature of the original formulation, a piece-wise linearization approach is applied to obtain a tight lower bound of the optimal solution. 
The formulation can be adapted to several industry-critical units and the supply chain of electric motors is provided here as an example. © 2013 Elsevier Ltd. |
J. Flores, M.L. Cunico, M.A. Rodriguez, A.R. Vecchietti Optimizing the energy production infrastructure considering uncertainty in fossil resource availability (Artículo de revista) Industrial and Engineering Chemistry Research, 53 (44), pp. 16984-17001, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Flores201416984, title = {Optimizing the energy production infrastructure considering uncertainty in fossil resource availability}, author = { J. Flores and M.L. Cunico and M.A. Rodriguez and A.R. Vecchietti}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84910048616&partnerID=40&md5=4504ca14fcba44b4d7a97caf646cef5a}, doi = {10.1021/ie500459a}, year = {2014}, date = {2014-01-01}, journal = {Industrial and Engineering Chemistry Research}, volume = {53}, number = {44}, pages = {16984-17001}, abstract = {Fossil sources scarcity and environmental contamination are the major factors that put the energy industry into focus. Considering the lack of accuracy in fossil reserves, we propose an optimization model to plan investments in the Argentinean energy structure taking into account uncertainty in the fossil source availability. Tactical decisions, such as the amount of primary and secondary sources produced, are included and the emission of greenhouse gases is penalized in the objective function to improve the environmental impact of the energy structure. Also, two different methods are used to model uncertainty. Fuzzy set theory is applied to generate scenarios that show the random behavior of resource availability parameters. In addition, a two-stage stochastic model is proposed to integrate the different decision levels involved in the problem and represent resource availability uncertainty in fossil fuels. Both approaches complement each other to obtain a comprehensive solution of the energy planning problem. 
© 2014 American Chemical Society.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } Fossil sources scarcity and environmental contamination are the major factors that put the energy industry into focus. Considering the lack of accuracy in fossil reserves, we propose an optimization model to plan investments in the Argentinean energy structure taking into account uncertainty in the fossil source availability. Tactical decisions, such as the amount of primary and secondary sources produced, are included and the emission of greenhouse gases is penalized in the objective function to improve the environmental impact of the energy structure. Also, two different methods are used to model uncertainty. Fuzzy set theory is applied to generate scenarios that show the random behavior of resource availability parameters. In addition, a two-stage stochastic model is proposed to integrate the different decision levels involved in the problem and represent resource availability uncertainty in fossil fuels. Both approaches complement each other to obtain a comprehensive solution of the energy planning problem. © 2014 American Chemical Society. |
P. Druetta, P.A. Aguirre, S.F. Mussati Minimizing the total cost of multi effect evaporation systems for seawater desalination (Artículo de revista) Desalination, 344 , pp. 431-445, 2014, (cited By 4). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Druetta2014431, title = {Minimizing the total cost of multi effect evaporation systems for seawater desalination}, author = { P. Druetta and P.A. Aguirre and S.F. Mussati}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84899786310&partnerID=40&md5=dad4775f908adb9568352df2690d6b53}, doi = {10.1016/j.desal.2014.04.007}, year = {2014}, date = {2014-01-01}, journal = {Desalination}, volume = {344}, pages = {431-445}, abstract = {A mathematical model developed recently by the authors is extended into a non linear mathematical programming problem to determine the nominal optimal sizing of equipment (heat transfer area) and optimal operation conditions that satisfy a fixed nominal production of fresh water at minimum total annual cost. Relative marginal values computed from the optimized results and a global sensitivity analysis are then used to rank the process parameters according to their influences on the total cost. Once the nominal design and operating conditions are determined, a new optimization problem is stated: Is it possible to increase (using the nominal optimal design) the water production over the nominal capacity of production? Thus, the new optimization problem consists of the maximization of the distillate production. Optimization results for both design problems are presented and discussed in detail. One of the obtained results reveals that the increase of the distillate production in 20% over the nominal capacity (200. kg/s), leads to increases in the total operating cost from 10.8 to 11.4. million. US$/yr while the seawater flow rate and the steam temperature increase about 23 and 5%, respectively. 
© 2014 Elsevier B.V.}, note = {cited By 4}, keywords = {}, pubstate = {published}, tppubtype = {article} } A mathematical model developed recently by the authors is extended into a non linear mathematical programming problem to determine the nominal optimal sizing of equipment (heat transfer area) and optimal operation conditions that satisfy a fixed nominal production of fresh water at minimum total annual cost. Relative marginal values computed from the optimized results and a global sensitivity analysis are then used to rank the process parameters according to their influences on the total cost. Once the nominal design and operating conditions are determined, a new optimization problem is stated: Is it possible to increase (using the nominal optimal design) the water production over the nominal capacity of production? Thus, the new optimization problem consists of the maximization of the distillate production. Optimization results for both design problems are presented and discussed in detail. One of the obtained results reveals that the increase of the distillate production in 20% over the nominal capacity (200. kg/s), leads to increases in the total operating cost from 10.8 to 11.4. million. US$/yr while the seawater flow rate and the steam temperature increase about 23 and 5%, respectively. © 2014 Elsevier B.V. |
C. Pieragostini, P.A. Aguirre, M.C. Mussati Life cycle assessment of corn-based ethanol production in Argentina (Artículo de revista) Science of the Total Environment, 472 , pp. 212-225, 2014, (cited By 3). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Pieragostini2014212, title = {Life cycle assessment of corn-based ethanol production in Argentina}, author = { C. Pieragostini and P.A. Aguirre and M.C. Mussati}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84888787868&partnerID=40&md5=bf6dd3619813f592a596435124cd49c9}, doi = {10.1016/j.scitotenv.2013.11.012}, year = {2014}, date = {2014-01-01}, journal = {Science of the Total Environment}, volume = {472}, pages = {212-225}, abstract = {The promotion of biofuels as energy for transportation in the world is mainly driven by the perspective of oil depletion, the concerns about energy security and global warming. In Argentina, the legislation has imposed the use of biofuels in blend with fossil fuels (5 to 10%) in the transport sector.The aim of this paper is to assess the environmental impact of corn-based ethanol production in the province of Santa Fe in Argentina based on the life cycle assessment methodology.The studied system includes from raw materials production to anhydrous ethanol production using dry milling technology. The system is divided into two subsystems: agricultural system and refinery system. The treatment of stillage is considered as well as the use of co-products (distiller's dried grains with solubles), but the use and/or application of the produced biofuel is not analyzed: a cradle-to-gate analysis is presented. As functional unit, 1. MJ of anhydrous ethanol at biorefinery is chosen.Two life cycle impact assessment methods are selected to perform the study: Eco-indicator 99 and ReCiPe. SimaPro is the life cycle assessment software used. 
The influence of the perspectives on the model is analyzed by sensitivity analysis for both methods.The two selected methods identify the same relevant processes. The use of fertilizers and resources, seeds production, harvesting process, corn drying, and phosphorus fertilizers and acetamide-anillide-compounds production are the most relevant processes in agricultural system. For refinery system, corn production, supplied heat and burned natural gas result in the higher contributions. The use of distiller's dried grains with solubles has an important positive environmental impact. © 2013 Elsevier B.V.}, note = {cited By 3}, keywords = {}, pubstate = {published}, tppubtype = {article} } The promotion of biofuels as energy for transportation in the world is mainly driven by the perspective of oil depletion, the concerns about energy security and global warming. In Argentina, the legislation has imposed the use of biofuels in blend with fossil fuels (5 to 10%) in the transport sector.The aim of this paper is to assess the environmental impact of corn-based ethanol production in the province of Santa Fe in Argentina based on the life cycle assessment methodology.The studied system includes from raw materials production to anhydrous ethanol production using dry milling technology. The system is divided into two subsystems: agricultural system and refinery system. The treatment of stillage is considered as well as the use of co-products (distiller's dried grains with solubles), but the use and/or application of the produced biofuel is not analyzed: a cradle-to-gate analysis is presented. As functional unit, 1. MJ of anhydrous ethanol at biorefinery is chosen.Two life cycle impact assessment methods are selected to perform the study: Eco-indicator 99 and ReCiPe. SimaPro is the life cycle assessment software used. 
The influence of the perspectives on the model is analyzed by sensitivity analysis for both methods. The two selected methods identify the same relevant processes. The use of fertilizers and resources, seeds production, harvesting process, corn drying, and phosphorus fertilizers and acetamide-anillide-compounds production are the most relevant processes in agricultural system. For refinery system, corn production, supplied heat and burned natural gas result in the higher contributions. The use of distiller's dried grains with solubles has an important positive environmental impact. © 2013 Elsevier B.V. |
B.A. Talagañis, G.O. Meyer, D.G. Oliva, M. Fuentes, P.A. Aguirre Modeling and optimal design of cyclic processes for hydrogen purification using hydride-forming metals (Artículo de revista) International Journal of Hydrogen Energy, 39 (33), pp. 18997-19008, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Talagañis201418997, title = {Modeling and optimal design of cyclic processes for hydrogen purification using hydride-forming metals}, author = { B.A. Talagañis and G.O. Meyer and D.G. Oliva and M. Fuentes and P.A. Aguirre}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84908223819&partnerID=40&md5=adaf534a9f3637fa48046f103d540c15}, doi = {10.1016/j.ijhydene.2014.09.045}, year = {2014}, date = {2014-01-01}, journal = {International Journal of Hydrogen Energy}, volume = {39}, number = {33}, pages = {18997-19008}, abstract = {Hydrogen at high purity degrees can be obtained by using the well-known Pressure Swing Adsorption (PSA) process. In this paper, a Pressure Swing Absorption (PSAb) alternative operating batch wise is analyzed. An optimal design of cyclic processes for hydrogen purification using hydride-forming metals as absorption material is addressed. The selected case study is a thermo-chemical treatment process that consumes high purity hydrogen to reduce oxides and generates a waste stream that contains residual H2. PSAb process is fed with this hydrogen-poor stream; and high purity hydrogen recovery levels are obtained. A mathematical model based on an energy integrated scheme is presented to develop the optimal process design and to obtain optimal operating conditions. Various optimized solutions are compared by modifying key parameters or restriction equations. Thus, an interesting trade-off between H2 recovery and system size is analyzed. 
Large systems operate at large cycle times, obtaining up to 98% of H2 recovery in the order of hours, whereas small systems can recover up to 60% of H2 in short cycles of a few seconds. © 2014 Hydrogen Energy Publications, LLC. All rights reserved.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } Hydrogen at high purity degrees can be obtained by using the well-known Pressure Swing Adsorption (PSA) process. In this paper, a Pressure Swing Absorption (PSAb) alternative operating batch wise is analyzed. An optimal design of cyclic processes for hydrogen purification using hydride-forming metals as absorption material is addressed. The selected case study is a thermo-chemical treatment process that consumes high purity hydrogen to reduce oxides and generates a waste stream that contains residual H2. PSAb process is fed with this hydrogen-poor stream; and high purity hydrogen recovery levels are obtained. A mathematical model based on an energy integrated scheme is presented to develop the optimal process design and to obtain optimal operating conditions. Various optimized solutions are compared by modifying key parameters or restriction equations. Thus, an interesting trade-off between H2 recovery and system size is analyzed. Large systems operate at large cycle times, obtaining up to 98% of H2 recovery in the order of hours, whereas small systems can recover up to 60% of H2 in short cycles of a few seconds. © 2014 Hydrogen Energy Publications, LLC. All rights reserved. |
F.J. Serralunga, P.A. Aguirre, M.C. Mussati Including disjunctions in real-time optimization (Artículo de revista) Industrial and Engineering Chemistry Research, 53 (44), pp. 17200-17213, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Serralunga201417200, title = {Including disjunctions in real-time optimization}, author = { F.J. Serralunga and P.A. Aguirre and M.C. Mussati}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84910090255&partnerID=40&md5=1c4d95e34cdff669c48cd0b91b94e2c0}, doi = {10.1021/ie5004619}, year = {2014}, date = {2014-01-01}, journal = {Industrial and Engineering Chemistry Research}, volume = {53}, number = {44}, pages = {17200-17213}, abstract = {Real-time optimization (RTO) is widely used in industry to improve the steady-state performance of a process using the available measurements, reacting to changing prices and demands scenarios and respecting operating, contractual, and environmental constraints. Traditionally, RTO has used nonlinear continuous formulations to model the process. Mixed-integer formulations have not been used in RTO, because of the need of a fast solution (on the order of seconds or a few minutes), and because many discrete decisions, such as startups or shutdowns, are taken with less frequency in a scheduling layer. This work proposes the use of disjunctions in RTO models, listing a series of examples of discrete decisions (different to startups or shutdowns) that can be addressed by RTO. Two model adaptation approaches (the two-step approach and the modifier adaptation strategy) are revised and modified to make them suitable for RTO with discrete decisions. Some common techniques used in RTO (such as filtering the optimal inputs) are also analyzed and adapted for a formulation with disjunctions. The performance of RTO with disjunctions is shown by a case study in which a generic process is optimized. 
The results show that the performance of a process can be improved by RTO with discrete decisions. The system converges to the vicinity of the real plant optimum when constraints gradients are corrected, even under structural and parametric mismatch. © 2014 American Chemical Society.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } Real-time optimization (RTO) is widely used in industry to improve the steady-state performance of a process using the available measurements, reacting to changing prices and demands scenarios and respecting operating, contractual, and environmental constraints. Traditionally, RTO has used nonlinear continuous formulations to model the process. Mixed-integer formulations have not been used in RTO, because of the need of a fast solution (on the order of seconds or a few minutes), and because many discrete decisions, such as startups or shutdowns, are taken with less frequency in a scheduling layer. This work proposes the use of disjunctions in RTO models, listing a series of examples of discrete decisions (different to startups or shutdowns) that can be addressed by RTO. Two model adaptation approaches (the two-step approach and the modifier adaptation strategy) are revised and modified to make them suitable for RTO with discrete decisions. Some common techniques used in RTO (such as filtering the optimal inputs) are also analyzed and adapted for a formulation with disjunctions. The performance of RTO with disjunctions is shown by a case study in which a generic process is optimized. The results show that the performance of a process can be improved by RTO with discrete decisions. The system converges to the vicinity of the real plant optimum when constraints gradients are corrected, even under structural and parametric mismatch. © 2014 American Chemical Society. |
M.S. Mazzei, M.C. Mussati, S.F. Mussati NLP model-based optimal design of LiBr-H2O absorption refrigeration systems (Artículo de revista) International Journal of Refrigeration, 38 (1), pp. 58-70, 2014, (cited By 1). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Mazzei201458, title = {NLP model-based optimal design of LiBr-H2O absorption refrigeration systems}, author = { M.S. Mazzei and M.C. Mussati and S.F. Mussati}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84892596238&partnerID=40&md5=6c37da45569e26911b4d410ccc91d682}, doi = {10.1016/j.ijrefrig.2013.10.012}, year = {2014}, date = {2014-01-01}, journal = {International Journal of Refrigeration}, volume = {38}, number = {1}, pages = {58-70}, abstract = {This paper addresses the optimization of a single effect absorption refrigeration system operating with lithium bromide-water solution. A non-linear programming mathematical model is developed to determine the operating conditions and the distribution of the total heat transfer area (sizes) along the involved process units to optimize the following two objective functions: (i) maximization of the coefficient of performance for a given amount of the total heat transfer area, and (ii) minimization of the total heat transfer area of the system for a given cooling capacity. The proposed model can either be used for simulation or optimization purposes. Simulated or optimized values of temperature, pressure, composition and flow rate of all streams and sizing of each process unit are predicted. In addition, because of the non linear nature of the resulting model, a systematic solution procedure is proposed in order to guarantee the model convergence. A detailed discussion of the optimization results are presented through different case studies. © 2013 Elsevier B.V. 
All rights reserved.}, note = {cited By 1}, keywords = {}, pubstate = {published}, tppubtype = {article} } This paper addresses the optimization of a single effect absorption refrigeration system operating with lithium bromide-water solution. A non-linear programming mathematical model is developed to determine the operating conditions and the distribution of the total heat transfer area (sizes) along the involved process units to optimize the following two objective functions: (i) maximization of the coefficient of performance for a given amount of the total heat transfer area, and (ii) minimization of the total heat transfer area of the system for a given cooling capacity. The proposed model can either be used for simulation or optimization purposes. Simulated or optimized values of temperature, pressure, composition and flow rate of all streams and sizing of each process unit are predicted. In addition, because of the non linear nature of the resulting model, a systematic solution procedure is proposed in order to guarantee the model convergence. A detailed discussion of the optimization results are presented through different case studies. © 2013 Elsevier B.V. All rights reserved. |
M.A. Reinheimer, J.R. Medina, N.J. Scenna, S.F. Mussati, M. Freyre, G.A. Pérez Mathematical modeling and simulation of soluble protein extraction during leaching process in surimi elaboration (Artículo de revista) Journal of Food Engineering, 120 (1), pp. 167-174, 2014, (cited By 2). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Reinheimer2014167, title = {Mathematical modeling and simulation of soluble protein extraction during leaching process in surimi elaboration}, author = { M.A. Reinheimer and J.R. Medina and N.J. Scenna and S.F. Mussati and M. Freyre and G.A. Pérez}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84883163984&partnerID=40&md5=592e8496b770a54f16ccb64438756af8}, doi = {10.1016/j.jfoodeng.2013.07.030}, year = {2014}, date = {2014-01-01}, journal = {Journal of Food Engineering}, volume = {120}, number = {1}, pages = {167-174}, abstract = {This work presents a mathematical model to simulate the extraction process of soluble protein from sábalo (Prochilodus platensis) during the surimi elaboration. The mathematical model consists of both partial differential and algebraic equations. Central finite difference method and the explicit scheme were applied to discretize the partial differential equation. The resulting model was implemented into the optimization environment General Algebraic Modeling System (GAMS). Experimental data obtained from laboratory scale using sábalo as raw material, was used to verify the output results of the proposed model. A good agreement between experimental and simulated extraction yields was obtained (R2 = 0.9552). Once validated, the model was used to investigate the influence of several parameters such as, particle's diameter, volume fraction of the solvent, residence time and agitation velocity on the extraction efficiency. The results are presented and discussed through different case studies. © 2013 Elsevier Ltd. 
All rights reserved.}, note = {cited By 2}, keywords = {}, pubstate = {published}, tppubtype = {article} } This work presents a mathematical model to simulate the extraction process of soluble protein from sábalo (Prochilodus platensis) during the surimi elaboration. The mathematical model consists of both partial differential and algebraic equations. Central finite difference method and the explicit scheme were applied to discretize the partial differential equation. The resulting model was implemented into the optimization environment General Algebraic Modeling System (GAMS). Experimental data obtained from laboratory scale using sábalo as raw material, was used to verify the output results of the proposed model. A good agreement between experimental and simulated extraction yields was obtained (R2 = 0.9552). Once validated, the model was used to investigate the influence of several parameters such as, particle's diameter, volume fraction of the solvent, residence time and agitation velocity on the extraction efficiency. The results are presented and discussed through different case studies. © 2013 Elsevier Ltd. All rights reserved. |
J.I. Manassaldi, P.L. Mores, N.J. Scenna, S.F. Mussati Optimal design and operating conditions of an integrated plant using a natural gas combined cycle and postcombustion CO2 capture (Artículo de revista) Industrial and Engineering Chemistry Research, 53 (44), pp. 17026-17042, 2014, (cited By 1). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Manassaldi201417026, title = {Optimal design and operating conditions of an integrated plant using a natural gas combined cycle and postcombustion CO2 capture}, author = { J.I. Manassaldi and P.L. Mores and N.J. Scenna and S.F. Mussati}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84909619821&partnerID=40&md5=3cb20b1a57cafd6bea3b0c13a7e7d028}, doi = {10.1021/ie5004637}, year = {2014}, date = {2014-01-01}, journal = {Industrial and Engineering Chemistry Research}, volume = {53}, number = {44}, pages = {17026-17042}, abstract = {This paper deals with the simultaneous optimization of the operating conditions and sizes of each one of the process units of a natural gas combined cycle coupled to a postcombustion CO2 capture system. Precisely, from the mathematical models previously developed by the authors for each stand-alone process, a new optimization nonlinear programming (NLP) model is proposed in order to optimize the whole process but with the main characteristic that several feasible alternatives to integrate both processes are simultaneously embedded. Therefore, as a result of the model, the best integration schema, optimal operating conditions, and size of each process unit are obtained at the same time. No integer variables are needed to model discrete decisions in both processes. The maximization of the overall thermal efficiency is considered as an objective function. However, the proposed NLP model can be easily extended into a mixed-integer nonlinear programming (MINLP) model if it is necessary for cost minimization. 
The optimization results are discussed in detail, and they are compared with suboptimal configurations including reference cases. © 2014 American Chemical Society.}, note = {cited By 1}, keywords = {}, pubstate = {published}, tppubtype = {article} } This paper deals with the simultaneous optimization of the operating conditions and sizes of each one of the process units of a natural gas combined cycle coupled to a postcombustion CO2 capture system. Precisely, from the mathematical models previously developed by the authors for each stand-alone process, a new optimization nonlinear programming (NLP) model is proposed in order to optimize the whole process but with the main characteristic that several feasible alternatives to integrate both processes are simultaneously embedded. Therefore, as a result of the model, the best integration schema, optimal operating conditions, and size of each process unit are obtained at the same time. No integer variables are needed to model discrete decisions in both processes. The maximization of the overall thermal efficiency is considered as an objective function. However, the proposed NLP model can be easily extended into a mixed-integer nonlinear programming (MINLP) model if it is necessary for cost minimization. The optimization results are discussed in detail, and they are compared with suboptimal configurations including reference cases. © 2014 American Chemical Society. |
P.L. Mores, E. Godoy, S.F. Mussati, N.J. Scenna A NGCC power plant with a CO2 post-combustion capture option. Optimal economics for different generation/capture goals (Artículo de revista) Chemical Engineering Research and Design, 92 (7), pp. 1329-1353, 2014, (cited By 4). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Mores20141329, title = {A NGCC power plant with a CO2 post-combustion capture option. Optimal economics for different generation/capture goals}, author = { P.L. Mores and E. Godoy and S.F. Mussati and N.J. Scenna}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84901668433&partnerID=40&md5=62ae26192322f75c5c7c86a5e76a551d}, doi = {10.1016/j.cherd.2013.11.013}, year = {2014}, date = {2014-01-01}, journal = {Chemical Engineering Research and Design}, volume = {92}, number = {7}, pages = {1329-1353}, abstract = {Fossil fuel power plants are one of the major sources of electricity generation, although invariably release greenhouse gases. Due to international treaties and countries regulations, CO2 emissions reduction is increasingly becoming key in the generators' economics. NGCC power plants constitute a widely used generation technology, from which CO2 capture through a post-combustion and MEA absorption option constitutes a technological challenge due to the low concentration of pollutants in the flue gas and the high energy requirements of the sequestration process. In the present work, a rigorous optimization model is developed to address the design and operation of power plants coupled to capture systems. The equations-oriented modeling strategy here utilized can address greenfield designs in which design and operating variables are simultaneously optimized, in order to ensure that the system will be able to meet process requirements at minimum cost. 
Then, an analysis of the electricity cost, CO2 avoidance cost, energy penalties, as well as the optimal values of decision variables is thoroughly pursued. Different economic tradeoffs are comprised at the optimal solutions for the joint project, as given by the different discrete and continuous decisions that the designer needs to weight in order to achieve the desired generation and capture goals, including the number of parallel capture trains, the inherent efficiency of each recovery unit, and the overall emissions reduction rate. In this context, the joint optimization of the NGCC power plant with the amine-based capture option results in a novel configuration where 731MW are optimally generated for supplying both the external demand and the capture plant energy requirements, and achieving an overall CO2 emissions reduction rate of 82.1% by means of a three capture trains arrangement, where 13.4% of the flue gas stream is bypassed and 94.8% of the CO2 gets recovered at each unit. This new generation/capture project features optimal values of its economic performance indicators, with an avoidance cost of 81.7US$ per tonne of CO2 captured, which can only be secured by simultaneously optimizing the design and operating variables of both systems on a state-of-the-art optimization algorithm. © 2013 The Institution of Chemical Engineers.}, note = {cited By 4}, keywords = {}, pubstate = {published}, tppubtype = {article} } Fossil fuel power plants are one of the major sources of electricity generation, although invariably release greenhouse gases. Due to international treaties and countries regulations, CO2 emissions reduction is increasingly becoming key in the generators' economics. 
NGCC power plants constitute a widely used generation technology, from which CO2 capture through a post-combustion and MEA absorption option constitutes a technological challenge due to the low concentration of pollutants in the flue gas and the high energy requirements of the sequestration process. In the present work, a rigorous optimization model is developed to address the design and operation of power plants coupled to capture systems. The equations-oriented modeling strategy here utilized can address greenfield designs in which design and operating variables are simultaneously optimized, in order to ensure that the system will be able to meet process requirements at minimum cost. Then, an analysis of the electricity cost, CO2 avoidance cost, energy penalties, as well as the optimal values of decision variables is thoroughly pursued. Different economic tradeoffs are comprised at the optimal solutions for the joint project, as given by the different discrete and continuous decisions that the designer needs to weight in order to achieve the desired generation and capture goals, including the number of parallel capture trains, the inherent efficiency of each recovery unit, and the overall emissions reduction rate. In this context, the joint optimization of the NGCC power plant with the amine-based capture option results in a novel configuration where 731MW are optimally generated for supplying both the external demand and the capture plant energy requirements, and achieving an overall CO2 emissions reduction rate of 82.1% by means of a three capture trains arrangement, where 13.4% of the flue gas stream is bypassed and 94.8% of the CO2 gets recovered at each unit. This new generation/capture project features optimal values of its economic performance indicators, with an avoidance cost of 81.7US$ per tonne of CO2 captured, which can only be secured by simultaneously optimizing the design and operating variables of both systems on a state-of-the-art optimization 
algorithm. © 2013 The Institution of Chemical Engineers. |
J.I. Manassaldi, N.J. Scenna, S.F. Mussati Optimization mathematical model for the detailed design of air cooled heat exchangers (Artículo de revista) Energy, 64 , pp. 734-746, 2014, (cited By 3). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Manassaldi2014734, title = {Optimization mathematical model for the detailed design of air cooled heat exchangers}, author = { J.I. Manassaldi and N.J. Scenna and S.F. Mussati}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84891487326&partnerID=40&md5=cc19fed1f0b5e618549159745bbb158b}, doi = {10.1016/j.energy.2013.09.062}, year = {2014}, date = {2014-01-01}, journal = {Energy}, volume = {64}, pages = {734-746}, abstract = {This paper presents a disjunctive mathematical model for the optimal design of air cooled heat exchangers. The model involves seven discrete decisions which are related to the selection of the type of the finned tube, number of tube rows, number of tube per row, number of passes, fins per unit length, mean fin thickness and the type of the flow regime. Each discrete decision is modeled using disjunctions, boolean variables and logical propositions. The main continuous decisions are: fan diameter, bundle width, tube length, pressure drops and velocities in both sides of the ACHE, heat transfer area, fan power consumption. Then, the resulting generalized disjunctive programming model is reformulated as a mixed integer non-linear programming, implemented in GAMS (general algebraic modeling system) and solved using a branch-and-bound method. The proposed model was successfully verified by comparing the obtained output results with different designs taken from the literature. Then, the model is solved to obtain the optimal designs corresponding to the following optimization criteria: a) minimization of the total annual cost which includes investment (heat transfer area) and operating cost (fan power consumption), b) minimization of the heat transfer area and c) minimization of the fan power consumption. 
Obtained optimal and sub-optimal designs are compared in detail. © 2013.}, note = {cited By 3}, keywords = {}, pubstate = {published}, tppubtype = {article} } This paper presents a disjunctive mathematical model for the optimal design of air cooled heat exchangers. The model involves seven discrete decisions which are related to the selection of the type of the finned tube, number of tube rows, number of tube per row, number of passes, fins per unit length, mean fin thickness and the type of the flow regime. Each discrete decision is modeled using disjunctions, boolean variables and logical propositions. The main continuous decisions are: fan diameter, bundle width, tube length, pressure drops and velocities in both sides of the ACHE, heat transfer area, fan power consumption. Then, the resulting generalized disjunctive programming model is reformulated as a mixed integer non-linear programming, implemented in GAMS (general algebraic modeling system) and solved using a branch-and-bound method. The proposed model was successfully verified by comparing the obtained output results with different designs taken from the literature. Then, the model is solved to obtain the optimal designs corresponding to the following optimization criteria: a) minimization of the total annual cost which includes investment (heat transfer area) and operating cost (fan power consumption), b) minimization of the heat transfer area and c) minimization of the fan power consumption. Obtained optimal and sub-optimal designs are compared in detail. © 2013. |
G. Corsano, G. Guillén-Gosálbez, J.M. Montagna Computational methods for the simultaneous strategic planning of supply chains and batch chemical manufacturing sites (Artículo de revista) Computers and Chemical Engineering, 60 , pp. 154-171, 2014, (cited By 3). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Corsano2014154, title = {Computational methods for the simultaneous strategic planning of supply chains and batch chemical manufacturing sites}, author = { G. Corsano and G. Guillén-Gosálbez and J.M. Montagna}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84884955295&partnerID=40&md5=71fa0a4d3ac9939c25e86047fe8eaaea}, doi = {10.1016/j.compchemeng.2013.09.001}, year = {2014}, date = {2014-01-01}, journal = {Computers and Chemical Engineering}, volume = {60}, pages = {154-171}, abstract = {In this work we present efficient solution strategies for the task of designing supply chains with the explicit consideration of the detailed plant performance of the embedded facilities. Taking as a basis a mixed-integer linear programming (MILP) model introduced in a previous work, we propose three solution strategies that exploit the underlying mathematical structure: A bi-level algorithm, a Lagrangean decomposition method, and a hybrid approach that combines features from both of these two methods. Numerical results show that the bi-level method outperforms the others, leading to significant CPU savings when compared to the full space MILP. © 2013.}, note = {cited By 3}, keywords = {}, pubstate = {published}, tppubtype = {article} } In this work we present efficient solution strategies for the task of designing supply chains with the explicit consideration of the detailed plant performance of the embedded facilities. 
Taking as a basis a mixed-integer linear programming (MILP) model introduced in a previous work, we propose three solution strategies that exploit the underlying mathematical structure: A bi-level algorithm, a Lagrangean decomposition method, and a hybrid approach that combines features from both of these two methods. Numerical results show that the bi-level method outperforms the others, leading to significant CPU savings when compared to the full space MILP. © 2013. |
Y. Fumero, G. Corsano, J.M. Montagna Simultaneous batching and scheduling of batch plants that operate in a campaign-mode, considering nonidentical parallel units and sequence-dependent changeovers (Artículo de revista) Industrial and Engineering Chemistry Research, 53 (44), pp. 17059-17074, 2014, (cited By 1). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Fumero201417059, title = {Simultaneous batching and scheduling of batch plants that operate in a campaign-mode, considering nonidentical parallel units and sequence-dependent changeovers}, author = { Y. Fumero and G. Corsano and J.M. Montagna}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84910092411&partnerID=40&md5=11c0ec640c08a8e037ee034118846a88}, doi = {10.1021/ie500454x}, year = {2014}, date = {2014-01-01}, journal = {Industrial and Engineering Chemistry Research}, volume = {53}, number = {44}, pages = {17059-17074}, abstract = {A mathematical modeling framework for the simultaneous batching and scheduling of multiproduct batch plants is proposed in this work. The scheduling decisions are formulated according to campaign-based operation mode. When a stable context can be assumed on the time horizon taken into account, this operation mode assures a more efficient production management. In addition, sequence-dependent changeover times and different unit sizes for parallel units in each stage are considered. Given the plant configuration and unit sizes, the total amount of each product to be produced and the product recipes, the proposed approach determines the number of batches that compose the production campaign and their sizes, the batches assignment to units, the sequencing of batches in each unit for each stage, and the timing of batches in each unit in order to minimize the campaign cycle time. A solution strategy is proposed to enhance the computational performance of the simultaneous optimization. The approach capabilities are shown through three numerical examples. 
© 2014 American Chemical Society.}, note = {cited By 1}, keywords = {}, pubstate = {published}, tppubtype = {article} } A mathematical modeling framework for the simultaneous batching and scheduling of multiproduct batch plants is proposed in this work. The scheduling decisions are formulated according to campaign-based operation mode. When a stable context can be assumed on the time horizon taken into account, this operation mode assures a more efficient production management. In addition, sequence-dependent changeover times and different unit sizes for parallel units in each stage are considered. Given the plant configuration and unit sizes, the total amount of each product to be produced and the product recipes, the proposed approach determines the number of batches that compose the production campaign and their sizes, the batches assignment to units, the sequencing of batches in each unit for each stage, and the timing of batches in each unit in order to minimize the campaign cycle time. A solution strategy is proposed to enhance the computational performance of the simultaneous optimization. The approach capabilities are shown through three numerical examples. © 2014 American Chemical Society. |
G. Corsano, Y. Fumero, J.M. Montagna Integrated decision making for the optimal bioethanol supply chain (Artículo de revista) Energy Conversion and Management, 88 , pp. 1127-1142, 2014, (cited By 1). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Corsano20141127, title = {Integrated decision making for the optimal bioethanol supply chain}, author = { G. Corsano and Y. Fumero and J.M. Montagna}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84910075619&partnerID=40&md5=a5383bb22a6af0c72e834b71edc339db}, doi = {10.1016/j.enconman.2014.04.090}, year = {2014}, date = {2014-01-01}, journal = {Energy Conversion and Management}, volume = {88}, pages = {1127-1142}, abstract = {Bioethanol production poses different challenges that require an integrated approach. Usually previous works have focused on specific perspectives of the global problem. On the contrary, bioethanol, in particular, and biofuels, in general, requires an integrated decision making framework that takes into account the needs and concerns of the different members involved in its supply chain. In this work, a Mixed Integer Linear Programming (MILP) model for the optimal allocation, design and production planning of integrated ethanol/yeast plants is considered. The proposed formulation addresses the relations between different aspects of the bioethanol supply chain and provides an efficient tool to assess the global operation of the supply chain taking into account different points of view. The model proposed in this work simultaneously determines the structure of a three-echelon supply chain (raw material sites, production facilities and customer zones), the design of each installed plant and operational considerations through production campaigns. Yeast production is considered in order to reduce the negative environmental impact caused by bioethanol residues. Several cases are presented in order to assess the approach capabilities and to evaluate the tradeoffs among all the decisions. 
© 2014 Elsevier Ltd. All rights reserved.}, note = {cited By 1}, keywords = {}, pubstate = {published}, tppubtype = {article} } Bioethanol production poses different challenges that require an integrated approach. Usually previous works have focused on specific perspectives of the global problem. On the contrary, bioethanol, in particular, and biofuels, in general, requires an integrated decision making framework that takes into account the needs and concerns of the different members involved in its supply chain. In this work, a Mixed Integer Linear Programming (MILP) model for the optimal allocation, design and production planning of integrated ethanol/yeast plants is considered. The proposed formulation addresses the relations between different aspects of the bioethanol supply chain and provides an efficient tool to assess the global operation of the supply chain taking into account different points of view. The model proposed in this work simultaneously determines the structure of a three-echelon supply chain (raw material sites, production facilities and customer zones), the design of each installed plant and operational considerations through production campaigns. Yeast production is considered in order to reduce the negative environmental impact caused by bioethanol residues. Several cases are presented in order to assess the approach capabilities and to evaluate the tradeoffs among all the decisions. © 2014 Elsevier Ltd. All rights reserved. |
M.G. Marcovecchio, A.Q. Novais, I.E. Grossmann Deterministic optimization of the thermal Unit Commitment problem: A Branch and Cut search (Artículo de revista) Computers and Chemical Engineering, 67 , pp. 53-68, 2014, (cited By 1). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Marcovecchio201453, title = {Deterministic optimization of the thermal Unit Commitment problem: A Branch and Cut search}, author = { M.G. Marcovecchio and A.Q. Novais and I.E. Grossmann}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84898774512&partnerID=40&md5=a5c9b1c1f7b9b9abe40cad32edaf9c69}, doi = {10.1016/j.compchemeng.2014.03.009}, year = {2014}, date = {2014-01-01}, journal = {Computers and Chemical Engineering}, volume = {67}, pages = {53-68}, abstract = {This paper proposes a novel deterministic optimization approach for the Unit Commitment (UC) problem, involving thermal generating units. A mathematical programming model is first presented, which includes all the basic constraints and a set of binary variables for the on/off status of each generator at each time period, leading to a convex mixed-integer quadratic programming (MIQP) formulation. Then, an effective solution methodology based on valid integer cutting planes is proposed, and implemented through a Branch and Cut search for finding the global optimal solution. The application of the proposed approach is illustrated with several examples of different dimensions. Comparisons with other mathematical formulations are also presented. © 2014 Elsevier Ltd.}, note = {cited By 1}, keywords = {}, pubstate = {published}, tppubtype = {article} } This paper proposes a novel deterministic optimization approach for the Unit Commitment (UC) problem, involving thermal generating units. 
A mathematical programming model is first presented, which includes all the basic constraints and a set of binary variables for the on/off status of each generator at each time period, leading to a convex mixed-integer quadratic programming (MIQP) formulation. Then, an effective solution methodology based on valid integer cutting planes is proposed, and implemented through a Branch and Cut search for finding the global optimal solution. The application of the proposed approach is illustrated with several examples of different dimensions. Comparisons with other mathematical formulations are also presented. © 2014 Elsevier Ltd. |
V. Bogado, S. Gonnet, H.P. Leone Modeling and simulation of software architecture in discrete event system specification for quality evaluation (Artículo de revista) Simulation, 90 (3), pp. 290-319, 2014, (cited By 0). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Bogado2014290, title = {Modeling and simulation of software architecture in discrete event system specification for quality evaluation}, author = { V. Bogado and S. Gonnet and H.P. Leone}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84897740919&partnerID=40&md5=00341a395c70dd0e852291dcd9edbf17}, doi = {10.1177/0037549713518586}, year = {2014}, date = {2014-01-01}, journal = {Simulation}, volume = {90}, number = {3}, pages = {290-319}, abstract = {Software quality is an important issue in software engineering. The evaluation of software architecture is crucial to achieve quality scenarios, which reduces development and maintenance costs. This work presents a discrete event simulation environment for the software architecture assessment considering both functional and quality aspects. Discrete event system specification (DEVS) formalism and the underlying framework are used to specify the simulation elements. DEVS is based on the system theory and follows the engineering and object-oriented fundamentals to construct complex dynamic systems. The proposed environment is built in a modular and hierarchical way that provides scalability and reusability advantages. Although the proposal is focused on three quality attributes and a few metrics, this approach enables the definition of new elements and metrics related to other quality attributes that can be visible at runtime. A traditional architecture illustrates the proposal, where preliminary computational experiments for this real software system and concrete quality scenarios demonstrate the feasibility of the integrated simulation environment for the software architecture evaluation. 
In addition, a discussion shows how the results could help architects make design decisions to improve software quality during development. © 2014 The Society for Modeling and Simulation International.}, note = {cited By 0}, keywords = {}, pubstate = {published}, tppubtype = {article} } Software quality is an important issue in software engineering. The evaluation of software architecture is crucial to achieve quality scenarios, which reduces development and maintenance costs. This work presents a discrete event simulation environment for the software architecture assessment considering both functional and quality aspects. Discrete event system specification (DEVS) formalism and the underlying framework are used to specify the simulation elements. DEVS is based on the system theory and follows the engineering and object-oriented fundamentals to construct complex dynamic systems. The proposed environment is built in a modular and hierarchical way that provides scalability and reusability advantages. Although the proposal is focused on three quality attributes and a few metrics, this approach enables the definition of new elements and metrics related to other quality attributes that can be visible at runtime. A traditional architecture illustrates the proposal, where preliminary computational experiments for this real software system and concrete quality scenarios demonstrate the feasibility of the integrated simulation environment for the software architecture evaluation. In addition, a discussion shows how the results could help architects make design decisions to improve software quality during development. © 2014 The Society for Modeling and Simulation International. |
2013 |
E.C. Martínez, M.D. Cristaldi, R.J. Grau Dynamic optimization of bioreactors using probabilistic tendency models and Bayesian active learning (Artículo de revista) Computers and Chemical Engineering, 49 , pp. 37-49, 2013, (cited By 4). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Martínez201337, title = {Dynamic optimization of bioreactors using probabilistic tendency models and Bayesian active learning}, author = { E.C. Martínez and M.D. Cristaldi and R.J. Grau}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84868676384&partnerID=40&md5=229621c55f0726946d56a51f5f75e954}, doi = {10.1016/j.compchemeng.2012.09.010}, year = {2013}, date = {2013-01-01}, journal = {Computers and Chemical Engineering}, volume = {49}, pages = {37-49}, abstract = {Due to the complexity of metabolic regulation, first-principles models of bioreactor dynamics typically have built-in errors (structural and parametric uncertainty) which give rise to the need for obtaining relevant data through experimental design in modeling for optimization. A run-to-run optimization strategy which integrates imperfect models with Bayesian active learning is proposed. Parameter distributions in a probabilistic model of bioreactor performance are re-estimated using data from experiments designed for maximizing information and performance. The proposed Bayesian decision-theoretic approach resorts to probabilistic tendency models that explicitly characterize their levels of confidence. Bootstrapping of parameter distributions is used to represent parametric uncertainty as histograms. The Bajpai & Reuss bioreactor model for penicillin production validated with industrial data is used as a representative case study. Run-to-run convergence to an improved policy is fast despite significant modeling errors as long as data are used to revise iteratively posterior distributions of the most influencing model parameters. 
© 2012 Elsevier Ltd.}, note = {cited By 4}, keywords = {}, pubstate = {published}, tppubtype = {article} } Due to the complexity of metabolic regulation, first-principles models of bioreactor dynamics typically have built-in errors (structural and parametric uncertainty) which give rise to the need for obtaining relevant data through experimental design in modeling for optimization. A run-to-run optimization strategy which integrates imperfect models with Bayesian active learning is proposed. Parameter distributions in a probabilistic model of bioreactor performance are re-estimated using data from experiments designed for maximizing information and performance. The proposed Bayesian decision-theoretic approach resorts to probabilistic tendency models that explicitly characterize their levels of confidence. Bootstrapping of parameter distributions is used to represent parametric uncertainty as histograms. The Bajpai & Reuss bioreactor model for penicillin production validated with industrial data is used as a representative case study. Run-to-run convergence to an improved policy is fast despite significant modeling errors as long as data are used to revise iteratively posterior distributions of the most influencing model parameters. © 2012 Elsevier Ltd. |
M.F. Luna, E.C. Martínez Model-based run-to-run optimization under uncertainty of biodiesel production (Artículo de revista) Computer Aided Chemical Engineering, 32 , pp. 103-108, 2013, (cited By 2). (Resumen | Enlaces | BibTeX | Etiquetas: ) @article{Luna2013103, title = {Model-based run-to-run optimization under uncertainty of biodiesel production}, author = { M.F. Luna and E.C. Martínez}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84879006463&partnerID=40&md5=f88050d3da07cfdde8a8e584550613db}, doi = {10.1016/B978-0-444-63234-0.50018-X}, year = {2013}, date = {2013-01-01}, journal = {Computer Aided Chemical Engineering}, volume = {32}, pages = {103-108}, abstract = {A significant source of uncertainty in biodiesel production is the variability of feed composition since the percentage and type of triglycerides varies considerably across different raw materials. Also, due to the complexity of both transesterification and saponification kinetics, first-principles models of biodiesel production typically have built-in errors (structural and parametric uncertainty) which give rise to the need for obtaining relevant data through experimental design in modeling for optimization. A run-to-run optimization strategy which integrates tendency models with Bayesian active learning is proposed. Parameter distributions in a probabilistic model of process performance are re-estimated using data from experiments designed for maximizing information and performance. Results obtained highlight that Bayesian optimal design of experiments using a probabilistic tendency model is effective in achieving the maximum ester content and yield in biodiesel production even though significant uncertainty in feed composition and modeling errors are present. 
© 2013 Elsevier B.V.}, note = {cited By 2}, keywords = {}, pubstate = {published}, tppubtype = {article} } A significant source of uncertainty in biodiesel production is the variability of feed composition since the percentage and type of triglycerides varies considerably across different raw materials. Also, due to the complexity of both transesterification and saponification kinetics, first-principles models of biodiesel production typically have built-in errors (structural and parametric uncertainty) which give rise to the need for obtaining relevant data through experimental design in modeling for optimization. A run-to-run optimization strategy which integrates tendency models with Bayesian active learning is proposed. Parameter distributions in a probabilistic model of process performance are re-estimated using data from experiments designed for maximizing information and performance. Results obtained highlight that Bayesian optimal design of experiments using a probabilistic tendency model is effective in achieving the maximum ester content and yield in biodiesel production even though significant uncertainty in feed composition and modeling errors are present. © 2013 Elsevier B.V. |