From 316aa2a21b47a13045a057f165749b5c15c41e5b Mon Sep 17 00:00:00 2001
From: Nikhil Ambardar <202-nikaviator@users.noreply.git-st.inf.tu-dresden.de>
Date: Tue, 20 Oct 2020 12:50:02 +0200
Subject: [PATCH] mm

---
 Bibliography.bib             | 121 +++++++++--------
 sections/back.tex            |  96 +++++++-------
 sections/conc.tex            |  52 ++++++--
 sections/eval.tex            |  49 ++++---
 sections/figures.tex         |   4 +-
 sections/imp.tex             | 246 ++++++++++++++++++++++++-----------
 sections/intro.tex           |  76 +++++------
 sections/motiv.tex           |  12 +-
 sections/soa.tex             |  85 ++++++------
 task_description/task_de.tex |   4 +-
 thesis.tex                   |  10 +-
 11 files changed, 455 insertions(+), 300 deletions(-)

diff --git a/Bibliography.bib b/Bibliography.bib
index a73f9c9..9ce0e38 100644
--- a/Bibliography.bib
+++ b/Bibliography.bib
@@ -1,3 +1,4 @@
+
 @Book{1,
   title={Robotics Modelling,Planning and Control Advanced Textbooks in Control And Signal Processing Book},
   author={Bruno Siciliano Lorenyo Sciavicco Luigi Villani Giuseppe Oriolo},
@@ -5,8 +6,6 @@
   year={2009},
   publisher={Springer}
 }
-
-
 @Article{2,
   author    = {Oxford English Dictionary},
   title     = {Definition Of Robots},
@@ -26,14 +25,14 @@
   language  = {english},
 }
 @Article{4,
-  title={Contact Systems Pick and Place Robots https://web.archive.org/web/20080914050602/http://www.contactsystems.com/c5_series.html},
+  title={Contact Systems Pick and Place Robots },
   author={Bruno Siciliano Lorenyo Sciavicco Luigi Villani Giuseppe Oriolo},
   volume={1},
   year={2008},
   publisher={WayBack Machine}
 }
 @Article{5,
-  title={"Smart Caddy"},
+  title={Smart Caddy},
   author={Seegrid},
   volume={1},
   year={2011},
@@ -56,65 +55,57 @@
   year={2012},
   publisher={Wayback Machines}
 }
-
 @Article{7,
   title={"In the Lab:Robots That Slink and Squirm"},
   author={John Schwartz},
   volume={1},
   year={2015},
   publisher={The New York Times},
-  title={"Squishy Robots Now Have Squishy Computers To Control Them"},
-  author={Kat Eschner},
-  volume={1},
-  year={2019},
-  publisher={Popular Science},
   title={"The Softer Side Of Robotics"},
   author={hp},
   volume={1},
   year={2019},
   publisher={Hewlett Packard}
 }
-@Article{8,
-  title={"An Overview Of Handy 1.A Rehabilitaton Robot For The Severely Disabled"},
-  author={Topping ,Mike;Smith,Jane},
-  volume={1},
-  year={1999},
-  publisher={Center on Disabilities Conference Proceedings},
-  title={"Welcome To The Ageing Future"},
-  author={Jeavans,Christine},
-  volume={1},
-  year={2016},
-  publisher={BBC News},
-  title={"Statistical Handbook Of Japan:Chapter 2 Population"},
-  author={Wayback Machines},
-  volume={1},
-  year={2013},
-  publisher={Statistics Bureau & Statistical Research and Training Institute},
-  title={"Robotic future of patient care"},
-  author={E-Health Insider},
-  volume={1},
-  year={2007},
-  publisher={Wayback Machine}
-}
-@Article{9,
-  title={"Squishy Robots Now Have Squishy Computers To Control Them"},
-  author={Kat Eschner},
-  volume={1},
-  year={2019},
-  publisher={Popular Science},
-  title={"Squishy Robots Now Have Squishy Computers To Control Them"},
-  author={Kat Eschner},
-  volume={1},
-  year={2019},
-  publisher={Popular Science}
+
+@Article{8,title={Squishy Robots Now Have Squishy Computers To Control Them},
+	author={Kat Eschner},
+	volume={1},
+	year={2019},
+	publisher={Popular Science}
 }
-@Article{10,
-  title={"Construction Robotics Industry Set To Double by 2023"},
-  author={Pollock,Emily},
-  volume={1},
-  year={2018},
-  publisher={engineering.com}
+
+@Article{9,title={Construction Robotics Industry Set To Double by 2023},
+	author={Pollock,Emily},
+	volume={1},
+	year={2018},
+	publisher={engineering.com}
 }
+
+@Article{10,
+	title={An Overview Of Handy Rehabilitation Robot For The Severely Disabled},
+	author={Topping, Mike and Smith, Jane},
+	volume={1},
+	year={1999},
+	publisher={Center on Disabilities Conference Proceedings}
+}
+@Article{10b,title={Welcome To The Ageing Future},
+	author={Jeavans, Christine},
+	volume={1},
+	year={2016},
+	publisher={BBC News}
+}
+@Article{10c,title={Statistical Handbook Of Japan: Chapter 2 Population},
+	author={{Wayback Machines}},
+	volume={1},
+	year={2013},
+	publisher={Statistics Bureau \& Statistical Research and Training Institute}
+}
+@Article{10d,title={Robotic future of patient care},
+	author={{E-Health Insider}},
+	volume={1},
+	year={2007}
+}
 @Book{11,
   title={Elements Of Robotics},
   author={Mordechai Ben-Ari,Francesco Mondada},
@@ -123,11 +114,11 @@
   publisher={Springer Open}
 }
 @Article{12,
-  title={"Evolution Of Robots Throughout History From Hephaestus To Da Vinci Robot"},
+  title={Evolution Of Robots Throughout History From Hephaestus To Da Vinci Robot},
   author={Christos Iavazzo Xanthi-Ekaterini D.Gkegke Paraskevi-Evangelia Iavazzo Ioannis D.Gkegkes},
   volume={1},
   year={2014},
-  publisher={"RAZVOJ ROBOTA KROZ POVIJEST DO „DA VINCIJEVOG ROBOTA“}
+  publisher={RAZVOJ ROBOTA KROZ POVIJEST DO DA VINCIJEVOG ROBOTA}
 }
 @Book{13,
   title={Franka Panda User Guide},
@@ -150,10 +141,8 @@
   year={2015},
   publisher={Springer}
 }
-
-
 @Article{16,
-  title={"Safety  Issues  in  Human-Robot  Interactions"},
+  title={Safety  Issues  in  Human-Robot  Interactions},
   author={Milos Vasic1, Aude Billard},
   volume={1},
   year={2013},
@@ -164,3 +153,27 @@
   year={2015},
   publisher={United States Department Of Labor}
 }
+
+@Article{17,
+	title={Formalizing Class Diagram In UML},
+	author={Souri, Alireza and Sharifloo, Mohammad Ali and Norouzi, Monire},
+	volume={1},
+	year={2007},
+	publisher={University College Of Nabi Akram ,Tabriz Iran}
+	
+}
+@Article{18,title={Introduction To BPMN},
+	author={Stephen A. White},
+	volume={1},
+	year={2007},
+	publisher={IBM Corporation}
+	
+}
+@Article{19,
+	title={On Making Robots Understand Safety: Embedding Injury Knowledge Into Control},
+	author={Haddadin, Sami and Haddadin, Simon and Khoury, Augusto and Khoury, Tim and Parusel, Sven},
+	volume={1},
+	year={2007},
+	publisher={IEEE,ICRA}
+}
+
diff --git a/sections/back.tex b/sections/back.tex
index f64a060..a464dd9 100644
--- a/sections/back.tex
+++ b/sections/back.tex
@@ -1,79 +1,83 @@
 \chapter{BACKGROUND}\label{ch:basics}
 
+\section{About Franka Panda Emika Robot}
 
-\section{About Franka Panda Emika Robot\\}
+In today's time, robotics is still a relatively modern research area. It is still developing and an active topic of research. A robot is a costly device and is accessible only to some people, the reasons being high costs, complex and difficult programming and, most critically, the need to separate humans and robots by safety fences or zones. The perennial question remained: how can this complex technology be made available to the general public and academicians? Franka Emika GmbH, the high-tech company from Munich, came forward to address this situation and provide a solution. Robot Panda is a sensitive and extremely versatile power tool at an affordable cost for the research fraternity to study and experiment with. It is also the first system of an entirely new generation of tools, which are developed with the following main objectives: first as a research robot, then as a co-worker in a factory and finally as an assistant in daily life for elderly or sick people, helping them with a friendly appearance \cite{13}. \\
 
+The system is accessed and used via Apps like a smartphone and can be taught new Tasks within only several minutes, without requiring any major technical or programming know-how. At the same time, the system is sensitive up to a level that it can take over assembling, testing or inspecting tasks next to a human coworker without the need for a safety fence, as it has all precautions and safety measures built inside. The online forum presents a platform called Franka World, and this represents the center of this ecosystem, in which the community can interact, developers and clients can be assigned and new solutions and applications are provided.\\
+
+This is a system developed by a globally leading German robot technology company, and is now produced in series in Allgäu, Bavaria. Panda research has been providing use for the research community since August 2017 and later in 2018 it became ready for use in industry, and this is only the beginning of a new generation of universal tools.
+
+\begin{figure}
+	\centering
+	\includegraphics[width=0.8\linewidth]{../../FRANKA_EMIKA}
+	\caption{ Robot Franka Panda Emika Image Source:https://robots-blog.com/2016/05/10/franka-emika-everybodys-robot/ }
+	\label{fig:frankaemika}
+\end{figure}
 
-\cite{13}Even today, robotics remains a technology accessible only to few. The reasons for this are the high costs, difficult programming and the separation of humans and robots by safety fences. So how can this complex technology be made accessible to the general population.Franka Emika GmbH, the high-tech company from Munich, want to solve this problem. Panda which is a sensitive and extremely versatile power tool. Panda is the first system of an entirely new generation of tools, which are developed with the following main objectives: first as a research robot, then as a colleague in a factory and finally as an assistant in daily life for elderly or sick people. 
-The system can be operated via Apps like a smartphone and be taught new Tasks within a few minutes, without requiring any programming skills. At the same time the system is sensitive to such an extent, that it can take over assembling, testing or inspecting tasks next to a human colleague without the need for a safety fence.The online platform Franka World represents the center of this ecosystem, in which the community can interact, developers and clients can be assigned and new solutions and applications are provided.
-The system was developed based on the globally leading German robot technology, and is now produced in series in Allgäu, Bavaria. Panda research has been ready for use for the research community since August 2017; later in 2018 it became ready for use in industry, and this is only the beginning of a new generation of universal tools.
 \newpage
 \section{Franka Panda Emika - Features}
 
-Few Features of Robot Franka Panda are -
+Few Important Features of Robot Franka Panda are -
 
 \subsection{Sensitivity}
-The Arm has real torque sensors in all 7 joints. These enable, among other things, Sensitivity to recognize and react to even the smallest forces acting on the Arm. This sensitivity facilitates numerous functionalities and capabilities, which are not possible with conventional industrial robots, such as impedance, guiding of the robot or collision detection. It should be noted that for achieving maximum sensitivity it is absolutely necessary to best possibly compensate additional forces acting on the robot (e.g. a mounted end effector). This is why the end effector to be used should be configured as precisely as possible in the admin section of Desk. 
+The Robot FEP Arm is embedded with real torque sensors in all joints, which are seven in total. It gives Robot Panda, among many things, a sensitivity to recognize and react to even a minute force acting on the arm and then on the joints. This sensitive capability enables many functionalities which are not possible with traditional industrial robots; these are impedance, guiding of the robot or collision detection. To achieve maximum sensitivity it is necessary to compensate additional forces acting on the robot (e.g. a mounted end effector). This is why the end effector to be used should be configured as precisely as possible. 
 \subsection{Impedance}
-Impedance is a behavior of the robot, which imitates the ability of a mechanical spring. This behavior can be used to interact gently with the environment, for example as not to damage fragile objects. The ability of changing impedance can be seen as similar to that of a human arm, which tenses the muscles in order to change rigidity and can adapt depending on the situation, in order to increase robustness when executing a task. 
+Impedance is that behavior of Robot Panda that imitates the ability of a mechanical spring. This capability can be used to interact gently with the environment, an instance that as not to damage any fragile object. The capability of changing impedance is akin to that of the human arm, which tenses the muscles in order to change rigidity and can adapt depending on the load and situation, in order to increase robustness when executing a task. 
 \subsection{Collision Detection and Reaction}
-We have incorporated torque sensors in all seven axes. These provide information on the currently applied torques per axis at any given time. In combination with our model-based control, deviation between the expected torque and the actual torque can be identified and the Arm can respond to it. For example, if a user reaches into the motion path of the robotic arm when it is moving, this will be recognized in real-time by one or several torque sensors. Such a torque magnification is classified as a collision and can for example stop the robot’s movement. 
+Robot panda is incorporated with torque sensors in all seven joint axes. They provide information on the currently applied torques per axis at any given time. In combination with a custom model-based control, difference or deviation between the expected torque and the actual torque is identified and the Arm can respond to it. An example is, if a user comes inside the proximity of a motion path of the robotic arm while in motion, this action will be recognized in real-time by one or several of torque sensors in joints. Such a torque magnification is classified as a collision and can in many cases classified stop the robot’s movement. 
 \subsection{Generating Forces}
-If the Arm is in intended contact with its surroundings, sensor signals of the seven torque sensors can be used to generate a defined force on the point of contact. Impedance Collision detection and reaction Generating forces. 
+If the Robot Arm is in intended contact with its surroundings, sensor signals of the seven torque sensors can be used to generate a defined force on the point of contact. Impedance Collision detection and reaction Generating forces. 
 
 \section{Software Tools Robot Franka Uses}
-The Panda research package allows for real-time, bidirectional connection between a workstation PC and the Arm. Also commanding the Hand is possible. This interface (FCI) enables the user to implement specially created controllers and applications with the robot. \footnote{https://moveit.ros.org/ , http://gazebosim.org/ , https://github.com/frankaemika/ , https://erdalpekel.de/?p=55}The package consists of libfranka, a C++ program library, and frankaros, an ROS interface with ROS Control and MoveIt integration. I am also using Gazebo which is a real life simulator used to run and check simulations of Robot Panda.
-
-\newpage
+The Panda research package allows for real-time, bidirectional connection between a workstation PC and the Arm. Even commanding the Robot Hand directly is possible. This so called interface (FCI) enables the users to execute so called specially created controllers and custom made applications with the robot. \footnote{https://moveit.ros.org/ , http://gazebosim.org/ , https://github.com/frankaemika/ , https://erdalpekel.de/?p=55}The package consists of libfranka, a C++ program library, and frankaros, an ROS interface with ROS Control and MoveIt integration. Gazebo was used and this is a simulating software used to run and check simulations of Robot Panda.
 
 \section{Coexistence With Humans And Meaning Of Cobots}
-\cite{14} Cobots, or collaborative robots, are robots intended for direct human robot interaction within a shared space, or where humans and robots are in close proximity. Cobot applications contrast with traditional industrial robot applications in which robots are isolated from human contact. Cobot safety may rely on lightweight construction materials, rounded edges, and inherent limitation of speed and force, or on sensors and software that ensures safe behavior.
-The International Federation of Robotics (IFR), a global industry association of robot manufacturers and national robot associations, recognizes two types of robots – 1)industrial robots used in automation (in an industrial environment) and 2) service robots for domestic and professional use. Service robots could be considered to be cobots as they are intended to work alongside humans. Industrial robots have traditionally worked separately from humans behind fences or other protective barriers, but cobots remove that separation.
-Cobots can have many uses, from information robots in public spaces (an example of service robots) , logistics robots that transport materials within a building , to industrial robots that help automate unergonomic tasks such as helping people moving heavy parts, or machine feeding or assembly operations.
-The IFR defines four types of collaborative manufacturing applications.
+Cobots, or so called collaborative robots, are robots intended for direct human robot interaction inside a common space or area, or somewhere humans and robots are needed in close proximity. Cobotic applications differ from traditional industrial robotic applications in which robots are isolated from human contact. Cobotic safety relies in some cases on lightweight construction materials, curved edges and rounded points, and inherent limitation of speed and force, or on sensors and software that ensures safe behavior \cite{14}.\\
+
+The International Federation of Robotics (IFR), a global industry association of robot manufacturers and national robot associations, has identified two types of robots – a.industrial robots used in automation (in an industrial setting) and b.domestic and professional usage by service robots. Service robots could be considered to be Cobots as they are intended to work alongside humans. Industrial robots have conventionally worked away and apart from humans and behind fences or other protective barriers, but Cobots remove that separation.\\
+
+Cobots can have many uses, from information robots in public spaces (an example of service robots) , logistics robots that transport materials within a building , to industrial robots that help automate un-ergonomic tasks such as helping people moving heavy parts, or machine feeding or assembly operations.
+The IFR defines four types of collaborative manufacturing applications.\\
 \subsection{Co-existence}
-Human and robot work alongside each other, but with no shared workspace.
+Human and Cobots work alongside each other, but with no common work areas but only separately.
 \subsection{Sequential Collaboration}
-Human and robot share all or part of a workspace but do not work on a part or machine at the same time.
-\subsection{Co-operation} 
-Robot and human work on the same part or machine at the same time, and both are in motion.
+Human and Robot share all or some part of a common workspace but do not work on a modular part or of machine at the same time.
+\subsection{Co-operation}
+Robot and human work on the same part or machine at the same time, and both are
+in motion.
 \subsection{Responsive Collaboration}
-The robot responds in real-time to the worker’s motion.
-In most industrial applications of cobots today, the cobot and human worker share the same space but complete tasks independently or sequentially (Co-existence or Sequential Collaboration.) Co-operation or Responsive Collaboration are presently less common.
-\newpage
-\section{Consequences Of Accidents / Malfunctions}
+The robot responds in real-time to the worker’s motion. In most industrial applications
+of Cobots today, the Cobot and human worker share the same space but complete
+tasks independently or sequentially (Co-existence or Sequential Collaboration.) Cooperation or Responsive Collaboration are presently less common.
+
+\section{ Consequences Of Accidents / Malfunctions}
+
+Accidents in Dense Human-Robot Co-existing Scenarios: Vasic et al. gave a detailed overview on safety issues in human-robot interactions \cite{15}. Starting with industry, the danger is when a human gets trapped between a robot and an object (e.g. a wall) or when a human collides with a robot, causing injury. A detailed list of significant hazards and damages includes: Mechanical, Electrical, Thermal, Noise, Vibration, Radiation, Material/Substance, Ergonomic, the hazards associated with environment and combined hazards. The hazards should be analyzed and minimized from technical points of view; however, in real applications, there are still unexpected errors and failures which can not be exactly predicted:\\
 
-\cite{15}Hazards in Dense Human-Robot Co-existing Scenarios Vasic and etc. gave a detailed survey on safety issues in human-robot interactions [4]. Starting from industry, the danger comes when human gets trapped between robot and an object (e.g. a wall) or when human comes into collision with a robot [5]. A detailed list of significant hazards can be found in ISO 10218-1 as annex, including: Mechanical, electrical, thermal, noise, vibration, radiation, material/substance, ergonomic, the hazards associated with environment and combined hazards. The hazard should be analyzed and minimized from technical points of view, however, in the real applications, there are still unexpected errors and failures which can not be exactly predicted: 
 \subsection{Mechanics Failure}
-Aging of motors, connectors
+Aging of motors, connectors causing malfunction or effects not desired.
 \subsection{Electronics Failure}
-Aging of components and isolation material, out of power half the way of operation
+Aging of components and insulation material, or loss of power halfway through an operation. Can cause shocks.
 \subsection{Program Failure}
-Program bugs, untested scenarios
+Program bugs, untested scenarios and cases, unintended actions.
 \subsection{Operational Error}
 Untrained Engineers, operators, and users
+While robots go out of the factory and into family or other social places, the above written traditional rules are not applicable. The situation is similar to that of the computer going from military use to civil and then personal use. The difference, however, is the actuation: the capability of active physical motion brings more potential hazards. Moreover, when the robot enters an open environment where changes are imminent at any time, the users are most often un-professional and un-experienced people. Animals (e.g. pets) are living beings that can easily come into close contact with the robot, which might even bring damage to the robot (e.g. a child might see a home service robot and pour water onto it just out of curiosity). Due to these obstacles, the most sold service robots now are still household robots, which are small in size and carry out comparatively simple and fixed tasks.
 
-While robot go out to factory and into family or other social areas, the above conventional rules become invalid. The situation is similar to that of computer going from military use to civil and then personal use. The difference is however the actuation, the capability of active physical movement brings more potential hazards. Moreover, the robot enters an open environment where changes may happen anytime and anyhow, the users are most often non-professional and unexperienced people. Animals (e.g. pets) might come into close contact with the robot, which might even bring damage to robot. (e.g. a child might see a home service robot and pour water onto it just out of curiosity.) Due to these obstacles, the most sold service robot now is still household robots, which are small in size, carry out comparatively simple and fixed tasks.
-
-\newpage
-
-\section{Safe Robot Deployment Practices And Making Robots Safer }
-
-\cite{16} Collaborative robots are poised to take off in the next few years, says WSPS consultant Robert Vomiero. With built-in safety limits, sensors and other safety functionality, “cobots” offer exciting benefits — they’re able to work more closely with humans, can be cheaper and easier to integrate and maintain, and promise better cycle time and productivity.
-
-But this doesn’t mean organizations anxious to embrace the new technology can rest easy when it comes to health and safety. Crushing and impact hazards remain, requiring special safety measures and a greater emphasis on certain aspects of your health and safety program.
+\section{Safe Robot Deployment Practices And Making Robots Safer}
 
-Many hazards and injuries can result from the use of robots in the workplace. Some robots, notably those in a traditional industrial environment, are fast and powerful. This increases the potential for injury as one swing from a robotic arm, for example, could cause serious bodily harm. There are additional risks when a robot malfunctions or is in need of maintenance. A worker that is working on the robot may be injured because a malfunctioning robot is typically unpredictable. For example, a robotic arm that is part of a car assembly line may experience a jammed motor. A worker that is working to fix the jam may suddenly get hit by the arm the moment it becomes unjammed. Additionally, if a worker is standing in a zone that is overlapping with nearby robotic arms, he or she may get injured by other moving equipment. 
+Collaborative robots are ready to take flight in the next few years, and this is imminent.With in built safety range limits, sensors and other safety functionality, “Cobots” offer exciting benefits — they’re able to work more closely with humans and also among them, they can be affordable and easier to integrate and maintain, and promise better cycle time and productivity \cite{16}.\\
 
-There are four types of accidents that can occur with robots: impact or collision accidents, crushing and trapping accidents, mechanical part accidents, and other accidents. Impact or collision accidents occur generally from malfunctions and unpredicted changes. Crushing and trapping accidents occur when a part of a worker’s body becomes trapped or caught on robotic equipment. Mechanical part accidents can occur when a robot malfunctions and starts to "break down," where the ejection of parts or exposed wire can cause serious injury. Other accidents at just general accidents that occur from working with robots.
+This however doesn’t mean the organizations working towards to embrace this new technology can rest easy when it comes to health and safety. Crushing and impact
+hazards and dangers remain, requiring special safety measures and a greater emphasis on certain aspects of your health and safety program.\\
 
-There are seven sources of hazards that are associated with human interaction with robots and machines: human errors, control errors, unauthorized access, mechanical failures, environmental sources, power systems, and improper installation. Human errors could be anything from one line of incorrect code to a loose bolt on a robotic arm. Many hazards can stem from human-based error. Control errors are intrinsic and are usually not controllable nor predictable. Unauthorized access hazards occur when a person who is not familiar with the area enters the domain of a robot. Mechanical failures can happen at any time, and a faulty unit is usually unpredictable. Environmental sources are things such as electromagnetic or radio interference in the environment that can cause a robot to malfunction. Power systems are pneumatic, hydraulic, or electrical power sources; these power sources can malfunction and cause fires, leaks, or electrical shocks. Improper installation is fairly self-explanatory; a loose bolt or an exposed wire can lead to inherent hazards.
+Most injuries and hazards do result from the use of robots at the workplace. Some robots, notably those in a traditional industrial environment, are brisk and powerful in their work. This increase the probability to cause injury to a human as one swing from a robotic arm, for example, could cause serious bodily harm needing serious medical attention and costs. Additional risks are present when a robot malfunctions or is in need of maintenance. A worker that is working on the robot may be injured because a malfunctioning robot is typically unpredictable and uncontrolled. For example, a robotic arm that is part of a supply chain for online delivery company may experience a jammed motor. A worker that is working to fix the jam may suddenly get hit by the arm the moment it becomes un-jammed. Additionally, if a worker is standing in a zone that is overlapping with nearby robotic arms, he or she may get injured by other moving equipment.\\
 
-Besides regular maintenance, the above listed hazards are minimized in industry applications by: 
-– strictly pre-defined environment and space (robot cell);
-– strictly pre-defined operation routine;
-– authorization of properly trained operators, maintenance workers and programmers;
-– speed limitation when human is present;
-– protective stop function and an independent emergency stop function.
+There are many types of accidents that can occur with robots, but four of them are major classifications: crushing and trapping accidents, impact or collision accidents, mechanical part accidents, and other miscellaneous accidents. Impact or collision accidents occur generally from malfunctions and un-predicted changes. Crushing and trapping accidents occur when a part of a worker’s body becomes trapped or caught on robotic equipment. Mechanical part accidents occur when a robot starts to "break down or malfunction," where the ejection of parts or exposed wire can cause serious injury. Other accidents are just general accidents that occur from working with robots and consist of general injury or shocks that occur.\\
 
+Out of many sources of hazards classified there are seven associated with human interaction with robots and machines: unauthorized access,human errors, mechanical failures,control errors, environmental sources,improper installation, and power systems. Human errors are anything from one line of incorrect code to a loose bolt
+on a robotic arm. Many hazards can stem from human-based error. Environmental sources are things such as electromagnetic or radio interference in the environment
+that can cause a robot to malfunction. Power systems are pneumatic, hydraulic, or electrical power sources; these power sources can malfunction and cause fires, leaks,or electrical shocks. Improper installation is fairly self-explanatory; a loose bolt or an exposed wire can lead to inherent hazards.\\
 
+Besides regular maintenance, the above listed hazards are minimized in industry applications by: – strictly pre-defined environment and space for the cell of robot; – strictly followed and pre-defined operation routine; – authorization of properly trained operators, maintenance workers and programmers; – speed limitation when human is present; – protective stop function and an independent emergency stop function.
\ No newline at end of file
diff --git a/sections/conc.tex b/sections/conc.tex
index a34ecf8..34d1f04 100644
--- a/sections/conc.tex
+++ b/sections/conc.tex
@@ -1,23 +1,57 @@
 \chapter{CONCEPT}\label{ch:conclusion}
 
 	\section{The Models}
-	I was given tasks to design three models namely World Model, Application Model and Safety Model . The world model describes the world of Robot Panda in general i.e. about what is inside the surroundings of the robot apart from the Robot itself and then features and attributes of all components in World Model.I used UML Class Diagrams to demonstrate the World Model . I use each component to describe it as a class showing attributes which is then also used to derive an object diagram which shows the instance of the Class Diagram.For this i used the tool Online Visual Paradigm.\\
+
+	The tasks in this thesis work were to design three models, namely the World Model, the Application Model and the Safety Model, and to implement them in a C++ program. The World Model describes the world of the Robot Panda in general, i.e. what is inside the surroundings of the robot apart from the robot itself, and then the features and attributes of all components in the World Model. In technical terms this is the world of the robot that exists in the real world, and the components in this world are the robot FPE itself with its arm, an object like a cube or ball which can act as an obstacle or as an object that can be picked up by the robot arm, and a human or a human arm, as shown in Figures 5.1 and 5.2 below.\\
 	
-	Next i made was an Application Model and for this I used Business Process Modelling Notation to demonstrate the diagram . I used each component to show it as a frame and then used the main frame of Robot to show the flow of task in application diagram from it to other components using the components of BPMN like if condition ,start and stop events and process events etc which then culminate to an end event in the robot frame. These flows show the flow of application in this Diagram for which i used the tool Modelio.\\
+	This is a model where the human/object--robot interaction is depicted for a layman in the following diagram. Technically, UML Class Diagrams were used to demonstrate the World Model. Each component was described as a class with its attributes, which was then also used to derive an object diagram showing an instance of the Class Diagram.
+		
+\begin{figure}
+	\centering
+	\includegraphics[width=0.8\linewidth]{../../bras-robotique-choisir-comparaison}
+	\caption{Image Source : Panda Skills Sensivity Video Screenshot}
+	\label{fig:bras-robotique-choisir-comparaison}
+\end{figure}
+
+\newpage
 	
-	I then made a Safety Model diagram using Modelio showing use case about how robot reacts when detecting obstacles and adds safety in the cell.
+	Next an Application Model was designed and for this Business Process Modelling Notation as a tool was used to demonstrate the diagram . Each component in the World model was used to show itself as a frame in this Application diagram and then the components shown inside as connected together giving a logical flow.The main frame of Robot in BPMN is used to show the flow of task in application diagram from it to other frames using the components of BPMN like "if" condition and process events in addition to start and stop events symbols etc, which then culminate to an end event in the main robot frame once again. These flows show the logical flow of process in this Diagram for which the tool Modelio \footnote{https://www.modelio.org/} was used.\\
 	
-	\section{Programming Of Hardware And Software Components - Purpose And Concepts\\}
+	A Safety Model diagram using UML State Chart was made showing use case about how robot reacts when detecting obstacles and adds safety in the cell.This was also designed in Modelio tool and uses small components from BPMN. This can also be related with Haddadin Safety Model which is later described and talked about how it can be extended. \\
 	
-
-	In Hardware point we have the Franka Robot which has a arm and the arm has joints described by J1...n . We also then have other objects in the world namely obstacles which can be one or more human and then non living ones . Then we have grasp objects which can be a ball or a cube. The Robot Panda has Motion Planning attributes which can be altered. Most of them are in MoveIT Rviz in the GUI for it. The state of robot arm is described by the coordinate position of joints of the arm. We can as well alter the many other parameters like torque on the arm in simulations.\\
+	This three models are described in detail in the previous chapter.
+	
+	
+	\begin{figure}
+		\centering
+		\includegraphics[width=1.0\linewidth]{../MzIxNDU3NA}
+		\caption{Robot Panda Picking Up Object Image Source : https://blog.generationrobots.com/en/list-of-criteria-to-look-at-before-buying-a-robot-arm/panda-franka-emika-care-robot-arm-2/}
+		\label{fig:mzixndu3na}
+	\end{figure}
 	
+	\newpage
 	
-	In the Software section we use ROS which is the Robot Operating System and Catkin which builds the workspace. I used MoveIT then to do motion planning for the robot and planning motion around the obstacles . All the plans are then run in real life simulator for which i used Gazebo Simulation which is used to replicate real life conditions and run robot inside this .This gives me an idea if the robot can perform as planned and expected in real life.To use MoveIT i build my workspace using catkin . I use the Erdal´s repos in the workspace which are frankaros , pandamoveITconfig and pandasimulation.\\ 
+	\section{Programming Of Hardware And Software Components - Purpose And Concepts}
+			
+	In terms of hardware we have the Franka robot, which has an arm whose joints are described by J1...n. We also have other objects in the world, namely obstacles, which can be one or more humans as well as non-living ones like a cube, box or ball. Then we have grasp objects, which can be a cube or an item to pick.\\
+   
+    The Robot Panda has Motion Planning attributes which can be altered. Most of them are in MoveIT Rviz which is the GUI described in detail in the following chapter. The state of robot arm is described by the coordinate position of joints of the arm. We can as well alter the many other parameters like torque on the arm in simulations.The Figure 5.3 shows the robot panda with joints .\\
 	
-    \section{Connecting Everything - Real Simulations\\}
+	\begin{figure}
+		\centering
+		\includegraphics[width=0.8\linewidth]{../../media_166_16680da7-1c13-47f9-83f0-e777befaf95d_phpjVXSN3}
+		\caption{Robot Panda Joints Image Source : https://www.chegg.com/homework-help/questions-and-answers/panda-franka-emika-shown-belowis-innovative-lightweight-robot-intended-friendly-andsafe-hu-q35002486}
+		\label{fig:media16616680da7-1c13-47f9-83f0-e777befaf95dphpjvxsn3}
+	\end{figure}
+		
+	In the software section, ROS \footnote{https://www.ros.org/} is used, which is the Robot Operating System, together with Catkin, which builds the workspace. ROS is started using the "roscore" command in a terminal. MoveIT is then used to do motion planning for the robot and to plan motion around the obstacles. All the plans are then run in a real-life simulator, for which the Gazebo Simulation was used; it replicates real-life conditions and runs the robot inside it. This gives an idea of whether the robot can perform as planned and expected in real life with torque on the joints. To use MoveIT, the workspace is built using catkin. Erdal's repos, frankaros, pandamoveITconfig and pandasimulation, are used in the workspace and are essential to build it. 
+	
+	
+	
+    \section{Connecting Everything - Simulations}
     
-   Gazebo Simulator is run alongside MoveIT motion planner which lets me replicate the MoveIT motion plan in Gazebo simulator. 
+   Gazebo Simulator is run alongside MoveIT motion planner which help replicate the MoveIT motion plan in Gazebo simulator and thus let us manipulate the robot using motion plan in Rviz . 
+
     
 
 
diff --git a/sections/eval.tex b/sections/eval.tex
index e065b26..52f838b 100644
--- a/sections/eval.tex
+++ b/sections/eval.tex
@@ -5,25 +5,42 @@
 \subsection{WORLD MODEL TEST RESULTS}
 
 The implementation section gives out results for the Position and Orientation of Joints of Robot Franka Panda Emika .\\
-They are 7 in number but only about 3 are shown here as a sample.Here are the results as it is on output.\\
-
-pandalink1 Position is x=0,y=0,z=0.333\\
-pandalink2 Position is x=0,y=0,z=0.333\\
-pandalink3 Position is x=-0.120566,y=-1.81861e05,z=0.625095  \\
-
-pandalink1 Orientation is w=1,x=0,y=0,z=7.54195e05\\
-pandalink2 Orientation is w=0.693616,x=-0.693595,y=-0.137573,z=-0.137468\\
-pandalink3 Orientation is w=0.980906,x=1.16121e05,y=-0.194483,z=8.93912e05...\\
 
+They are 7 in number but only about 3 are shown here as a sample. Here are the results.
+
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{10}{11}\selectfont\ttfamily
+}
+\begin{lstlisting}
+pandalink1 Position is x=0,y=0,z=0.333
+pandalink2 Position is x=0,y=0,z=0.333
+pandalink3 Position is x=-0.120566,y=-1.81861e05,z=0.625095  
+....	
+pandalink1 Orientation is w=1,x=0,y=0,z=7.54195e05
+pandalink2 Orientation is w=0.693616,x=-0.693595,y=-0.137573,z=-0.137468
+pandalink3 Orientation is w=0.980906,x=1.16121e05,y=-0.194483,z=8.93912e05
+....
+\end{lstlisting}
 The robot was moved in Gazebo Simulator after a motion planning trajectory execution in Rviz motion planner.\\
-The node was again built and run giving new values for position and orientation of joints.\\
 
-pandalink1 Position is x=0,y=0,z=0.333\\
-pandalink2 Position is x=0,y=0,z=0.333\\
-pandalink3 Position is x=-0.00290409,y=0.296506,z=0.442232 \\
-
-pandalink1 Orientation is w=0.703636,x=0,y=0,z=0.710561 \\
-pandalink2 Orientation is w=0.120731,x=-0.695508,y=-0.127549,z=0.696724 \\
+The node was again built and run giving new values for position and orientation of joints.
+
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{11}{11}\selectfont\ttfamily
+}
+\begin{lstlisting}
+pandalink1 Position is x=0,y=0,z=0.333
+pandalink2 Position is x=0,y=0,z=0.333
+pandalink3 Position is x=-0.00290409,y=0.296506,z=0.442232 
+....
+pandalink1 Orientation is w=0.703636,x=0,y=0,z=0.710561 
+pandalink2 Orientation is w=0.120731,x=-0.695508,y=-0.127549,z=0.696724 
 pandalink3 Orientation is w=0.443721,x=-0.314109, y=0.478016,z=0.68989
+....
+\end{lstlisting}
+
+This shows a changed set of values for position and orientation for all joints of the Robot Franka Panda.
 
 
diff --git a/sections/figures.tex b/sections/figures.tex
index 44ab0a4..78ecd78 100644
--- a/sections/figures.tex
+++ b/sections/figures.tex
@@ -2,8 +2,8 @@
 \begin{figure}[h]
 \centering
 \includegraphics[scale=0.8]{OMG_MOF_4levels}
-\caption{Das ist eine schlechte Grafik --- zu viele Pixel. Versuche Vektorgrafiken zu nutzen. Selbst malen geht gut mit draw.io powerpoint
-  oder inkscape}\label{fig:mof}
+\caption{CAPTION REPLACED
+  }\label{fig:mof}
 \end{figure}
 
 Wenn eine Abbildung verwendet wird, muss diese immer unbedingt im Text referenziert und beschrieben werden.
diff --git a/sections/imp.tex b/sections/imp.tex
index dc3e070..b231e65 100644
--- a/sections/imp.tex
+++ b/sections/imp.tex
@@ -1,107 +1,205 @@
 \chapter{IMPLEMENTATION}\label{ch:conclusion}
 
-\section{About The Task}
+\section{Going To Details}
+
 \subsection{WORLD MODEL IMPLEMENTATION}
-The task is about implementation of the designed World Model using C++ code. The program written ,described as a node is used to display the values of robot joints position and orientation values in addition to World Model program implementation. The program connects Rviz motion planner to Gazebo simulator by launching a launch file which launches the Rviz and Gazebo. The Robot can be manipulated using a motion plan in Rviz and then simulation can be used to run the plan in Gazebo.  
-The program is designed to take values from the robot in Gazebo and display them in output.So after launching the Rviz and Gazebo using launch file ,the program is built and run and displays the position and orientation values of robot joints .
-Then the Motion is planned using Rviz and executed in Gazebo thus changing the over all position of robot. After this stage the program is once again built and run and this now displays the new changed values for robot in Gazebo simulator. The results are then shared in evaluation section. 
+The task is about implementation of the designed World Model using C++ code. The program written ,described as a node is used to display the values of position and orientation of robot joints in addition to World Model program implementation which include other classes and objects. The program is connected with Rviz motion planner and to Gazebo simulator by launching a launch file which launches the Rviz \footnote{http://wiki.ros.org/rviz/UserGuide} and Gazebo \footnote{http://gazebosim.org/} and the node file \footnote{https://git-st.inf.tu-dresden.de/nikaviator/zero/-/blob/master/src/\lstinline|robot_models_node.cpp|}. The Robot can be manipulated using a motion plan in Rviz and then plan can be used to run the simulation in Gazebo Simulator.\\
+
+The program code is designed to take values from the robot in Gazebo Simulator and display them in output.The program also builds the object structure by initializing values and assigning them values.Every class designed is connected to the node program using the header files and contain variables that are used to build object structure in the main node program.So after launching the Rviz and Gazebo using launch file ,the program is built and run and thus displays the position and orientation values of robot joints.\\
+
+Then the Motion is planned in Rviz using the arrows that move the parts of robots arm and executed in Gazebo thus changing the over all position of robot. After this stage the program is once again built and run and this now displays the new changed values for robot joints position and orientation in Gazebo simulator. The results are then shared in evaluation section. More details about the tools and attributes they can handle are described below.
+
+\newpage
 
 \section{Tools Used}
-The tools used for implementing the task are -
+The tools used for implementation stage in the task are -
 
 \subsection{Clion IDE} 
-This is the editor used to write C++ code and run it. It provides many aides to help reduce the programmers workload and automate the process by providing many suggestions using inbuilt libraries .
 
-\subsection{Rviz} 
-This is Motion Planner used to plan the motion of robot . This connects to simulator for running real world simulations using the Robot Operating System (ROS).
+This is the editor used to write, compile and run the C++ code for a project, in this case the task. It provides many aides to help reduce the programmer's workload and automate the process by providing suggestions using inbuilt libraries and pointing out logic errors in advance, which helps mitigate errors and warnings that may come at a later stage. It can be started by clicking on its icon, but it is recommended to start it from the terminal by typing "CLion". The following figure shows what the CLion IDE looks like and also describes some of its features.\\
+
+\begin{figure}
+	\centering
+	\includegraphics[width=1.0\linewidth]{"../../Screenshot from 2020-10-14 04-08-02"}
+	\caption{CLion Screenshot}
+	\label{fig:screenshot-from-2020-10-14-04-08-02}
+\end{figure}
+
+Fig.6.1 On the left side the file structure is shown. On right side the files opened are shown under each tab. On the left the class files are visible for 11 classes under src folder and at bottom in this structure there is CMakeLists.txt file visible which is also used in this case to help set the configuration for executables. In the src folder there also exist file \lstinline|robot_models_node.cpp| visible below the .gitkeep statement. Above src folder there also exist a launch folder containing launcher.launch file in red . This contains launch commands to launch rviz , gazebo and the node file.
+
+\newpage
+
+\subsection{MoveIT-Rviz} 
+
+This is Motion Planner used to plan the motion of robot Franka Panda. This connects to simulator for running real world simulations using the Robot Operating System (ROS) and launch file.Above is screenshot for Rviz.\\
+
+\begin{figure}
+	\centering
+	\includegraphics[width=1.0\linewidth]{"../../Screenshot from 2020-10-14 04-13-00"}
+	\caption{MoveIT / RVIZ Screenshot}
+	\label{fig:screenshot-from-2020-10-14-04-13-00}
+\end{figure}
+
+
+Fig.6.2 This is what the Rviz motion planner looks like. The robot is stationary in this case and can be moved using arrows displayed in different colors to attempt motion in different directions. The 7 joints of the Robot Panda give it immense flexibility, but there may be some motions which are not possible. The Displays panel shown on the top left presents various parameters which Rviz can help tune and change. Some of them are Robot Description and Planning Scene Topic, and there is a Plan and Execute button which can be used to run the plan once the arm is moved to a new position, termed the final position. The plan can be executed in the Gazebo Simulator by clicking the Plan and Execute button; the trajectory is visible in Rviz and then almost immediately the Gazebo simulator executes the motion.\\
+
+When starting RVIZ for the first time there is an empty world, and then the Add button needs to be clicked in the Displays section. The option Motion Planning is selected from the list. This enables us to set various field parameters and their values, such as Fixed Frame, Robot Description, Planning Scene Topic, Planning Group and Planning Request, as well as others like Planning Trajectory; they can be explored further using the online tutorials provided by ROS MoveIT. 
+
+\newpage
 
 \subsection{Gazebo} 
-This is real world simulator which simulates robot motion with real world parameters tuned to check if the motion plan is executable and feasible in real world.This also provide values to program using listener in the node program.
 
+This is real world simulator which simulates robot motion with real world parameters tuned to check if the motion plan is executable and feasible in real world.This also provide values to program using listener tf buffer in the node program but this is explained in detail later.Below is a figure showing Gazebo simulator.\\
+
+\begin{figure}
+	\centering
+	\includegraphics[width=1.0\linewidth]{"../../Screenshot from 2020-10-14 04-13-12"}
+	\caption{Gazebo Screenshot}
+	\label{fig:screenshot-from-2020-10-14-04-13-12}
+\end{figure}
+
+In this task there was no need to explore the functionality or fiddle with any of the features in Gazebo. It was used to visualize the motion plan of Rviz-MoveIt and to see whether it is feasible in the real world. In this tool it is possible to tune a lot of real-world parameters; the ones related to the Robot Panda are the torques on the joints.
+\newpage
 \subsection{Gitlab} 
-To upload code as package to repository. 
+To upload code as package to a central repository. This is a widely used repository for students in university setting.
+It shows the projects available under the namespace . They can be clicked to view files structure under one particular project. 
+
+\begin{figure}
+	\centering
+	\includegraphics[width=1.0\linewidth]{"../Screenshot from 2020-10-14 20-16-28"}
+	\caption{Gitlab Screenshot}
+	\label{fig:screenshot-from-2020-10-14-20-16-28}
+\end{figure}
+
+\newpage
 
 \section{Programming The Implementation Of World Model}
 
-WORLD MODEL -
-To implement the World Model , ten files were created each for one of the classes. They were created as files to be included as header files(.h) in the main node program called in this case robotmodelsnode.cpp. Each header file describe the implementation of one class and its functions which are then objectified in the main node program.\\
+WORLD MODEL - To implement the World Model , ten files were created each for one of the classes. They were created as files to be included as header files(.h) in the main node program called in this case \lstinline{robot_models_node.cpp}.Each header file describe the implementation of one class and its functionalities by using variables which are then objectified in the main node program.\\
 
-The main program is used to add objects to the classes and used to input the values into variables and then run the implementation.\\
+The main program is used to add objects to the classes and used to input the values into variables using those objects and then run the implementation to display output.\\
 
 The following paragraphs describe the modules of the program used in implementation.\\
 
-- int main(int argc, char** argv) {\\
-	ros::init(argc, argv, "ROSNODE1");\\
-	ros::NodeHandle nodehandle("namespacename");\\
-	ros::AsyncSpinner spinner(1);\\
-	spinner.start();\\
-	ROSINFO("HELLO, WORLD");\\
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{11}{11}\selectfont\ttfamily
+}
+\begin{lstlisting}
+int main(int argc, char** argv) {
+ros::init(argc, argv, "ROSNODE1");
+ros::NodeHandle nodehandle("namespacename");
+ros::AsyncSpinner spinner(1);
+spinner.start();
+ROSINFO("HELLO, WORLD");
+\end{lstlisting}
+	
+Here the main function is defined and the ROS node is defined and initialized. Also a node handle is specified and a spin loop is initiated to start the program. An output message is then printed, which in this case is HELLO WORLD.\\
 	
-	Here the main function is defined and the ROS node defined and initialized. Also a Node handle is specified and a spin loop is initiated.An output message is then printed which in this case is HELLO WORLD.\\
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{11}{11}\selectfont\ttfamily
+}
+\begin{lstlisting}
+abstractobject o1;
+graspobject o2;
+hand o3;
+humanspace o4;
+joint o5g;
+obstacle o6;
+robot o7;
+robotfpe o8;
+world o9;
+worldobject o10;
+\end{lstlisting}
 	
-  - abstractobject o1;\\
-	graspobject o2;\\
-	hand o3;\\
-	humanspace o4;\\
-	joint o5g;\\
-	obstacle o6;\\
-	robot o7;\\
-	robotfpe o8;\\
-	world o9;\\
-	worldobject o10;\\
 	
-	This is an example of declaring objects for all the classes which are included into the main node file using header files.
-	Here objects are defined which are later used to input values into variables defined in the classes.\\
+This is an example of declaring objects for all the classes which are included into the main node file \lstinline{robot_models_node.cpp} ,using header files.
+Here objects are defined which are later used to input values into variables defined in the classes.
 	
-  - o9.c->robname="Robot FPE";\\
-    o9.c->of=true;\\
-    o9.c->moving=false;	\\
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{11}{11}\selectfont\ttfamily
+}
+\begin{lstlisting}
+o9.c->robname="Robot FPE";
+o9.c->of=true;
+o9.c->moving=false;	
+\end{lstlisting}
+	   
     
-    This is an example of defining values to the variables in the classes included as header files.In many cases pointers are used because they are useful to implement association and aggregation functionalities to program.\\
+This is an example of defining values to the variables in the classes included as header files inside the main program.In many cases pointers are used because they are useful to implement association and aggregation functionalities to program which are imperative.\\
     
-    Association is used where classes are together related by a relation which is not inheritance but only associated to each other because they use each other's values.In this case the class quaternion is associated to worldobject class by association relation which is implemented by including the associated class quaternion.h as header file.\\
+Association is used where classes are together related by a relation which is not inheritance but only associated to each other because they use each other's values.In this case the class quaternion is associated to worldobject class by association relation which is implemented by including the associated class quaternion.h as header file.\\
     
-    Aggregation is used where a class if do not exist must not let its child classes be instantiated and used.
-    In this case world class is in aggregation relation to abstract object class which have child classes as Grasp Object and Obstacle class.So if the world class were not to exist the Grasp Object and Obstacle Class cannot be initialized or used.
-    So pointers are used to take input values for child classes using pointer variables of world class.\\
-
-    - tf2ros::Buffer tfBuffer;\\
-      tf2ros::TransformListener tfListener(tfBuffer);\\
-
-    tf2 is used to listen to values of variables and hence a buffer is allocated and used.\\
-
-    - while (nodehandle.ok()) {\\
-	  for (const auto topic : ROSNODE1::topics)\\
-	{\\
-		geometrymsgs::TransformStamped transformStamped1;\\
-		geometrymsgs::TransformStamped transformStamped2;\\
-		geometrymsgs::TransformStamped transformStamped3;\\
-		...\\
-		
-		while loop is started and objects named transformStamped are defined here.\\
-
-    -  try {\\
-	transformStamped1 = tfBuffer.lookupTransform("world", o5a.name, ros::Time(0));\\
-	transformStamped2 = tfBuffer.lookupTransform("world", o5b.name, ros::Time(0));\\
-	transformStamped3 = tfBuffer.lookupTransform("world", o5c.name, ros::Time(0));\\
+Aggregation is used where, if a class does not exist, its child classes must not be instantiated or used.
+In this case the world class is in an aggregation relation to the abstract object class, which has the Grasp Object and Obstacle classes as children. So if the world class did not exist, the Grasp Object and Obstacle classes could not be initialized or used.
+So pointers are used to take input values for the child classes using pointer variables of the world class.
+	
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{11}{11}\selectfont\ttfamily
+}
+\begin{lstlisting}
+tf2ros::Buffer tfBuffer;
+tf2ros::TransformListener tfListener(tfBuffer);	
+\end{lstlisting}
+  
+tf2 is used to listen to values of variables and hence a buffer is allocated and used. After this a listener object is defined for tf2 which will be later used to listen the values and use them in program.
+
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{11}{11}\selectfont\ttfamily
+}
+\begin{lstlisting}
+while (nodehandle.ok()) {
+for (const auto topic : ROSNODE1::topics)
+{
+geometrymsgs::TransformStamped transformStamped1;
+geometrymsgs::TransformStamped transformStamped2;
+geometrymsgs::TransformStamped transformStamped3;
+\end{lstlisting}
+	
+while loop is started and objects named transformStamped1...n are defined here one for each joint.
+
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{11}{11}\selectfont\ttfamily
+}
+\begin{lstlisting}
+try {
+transformStamped1=tfBuffer.lookupTransform("world",o5a.name,ros::Time(0));
+transformStamped2=tfBuffer.lookupTransform("world",o5b.name,ros::Time(0));
+transformStamped3=tfBuffer.lookupTransform("world",o5c.name,ros::Time(0));
+...
+\end{lstlisting}
+  
+A try catch block is used to compare and connect the attribute of robot for which we seek values i.e. in this case joint 1...7 ,relative to standard world and store them in the defined objects transformStamped1,2,3....\\
+Then exceptions are caught and some statements defined for this .This is in case some things don't work.
+
+\lstset {language=C++}
+\lstset{
+	basicstyle=\fontsize{10}{11}\selectfont\ttfamily
+}	
+\begin{lstlisting}
+o5a.p1[0]=transformStamped1.transform.translation.x;
+o5a.p1[1]=transformStamped1.transform.translation.y;
+o5a.p1[2]=transformStamped1.transform.translation.z;
+ROSINFOSTREAM("pandalink1 Position is"<<"x="<< o5a.p1[0]<<",y="<<o5a.p1[1]<<",
+z="<<o5a.p1[2]);		
 	
-	A try catch block is used to compare and connect the attribute of robot for which we seek values relative to standard world and store them in the defined objects transformStamped1,2,3....\\
-	Then exceptions are caught and some statements defined for this .This is in case some things don't work.\\
+o5a.or2.w=transformStamped1.transform.rotation.w;
+o5a.or2.x=transformStamped1.transform.rotation.x;
+o5a.or2.y=transformStamped1.transform.rotation.y;
+o5a.or2.z=transformStamped1.transform.rotation.z;
+ROSINFOSTREAM("pandalink1 Orientation is"<<"w="<< o5a.or2.w<<",x="<<o5a.or2.x<<",
+y="<<o5a.or2.y<<",z="<<o5a.or2.z);
+\end{lstlisting}
 	
-	-o5a.p1[0]=transformStamped1.transform.translation.x;\\
-	o5a.p1[1]=transformStamped1.transform.translation.y;\\
-	o5a.p1[2]=transformStamped1.transform.translation.z;\\
-	ROSINFOSTREAM("pandalink1 Position is " <<" x = "<< o5a.p1[0] << ", y = "<< o5a.p1[1]<<", z = "<<o5a.p1[2]);\\		
-		
-	o5a.or2.w=transformStamped1.transform.rotation.w;\\
-	o5a.or2.x=transformStamped1.transform.rotation.x;\\
-	o5a.or2.y=transformStamped1.transform.rotation.y;\\
-	o5a.or2.z=transformStamped1.transform.rotation.z;\\
-	ROSINFOSTREAM("pandalink1 Orientation is " <<" w = "<< o5a.or2.w<<", \\ x = "<<o5a.or2.x<<", y = "<<o5a.or2.y<<", z = "<<o5a.or2.z);	\\
 	
-	Here in these steps the object variables take input the objects values from listener object transformStamped and store them in the node program and which then outputs these values.All the static variable defined in the program are initialized with 0 value before using them.\\
+Here in these steps the object variables take as input the values from the listener object transformStamped, store them in the node program, and then output these values. All the static variables defined in the program are initialized with the value 0 before being used.\\
 	
-	This program thus listens to values from simulation and displays using appropriate output statements. The robot in Gazebo is thus connected to Rviz motion planner using ROS which enables our node program robotmodelsnode to listen to desired values in this case position and orientation of joints .
+This program thus listens to values from simulation and displays using appropriate output statements. The robot in Gazebo is thus connected to Rviz motion planner using ROS which enables the node program \lstinline|robot_models_node.cpp|, to listen to desired values in this case position and orientation of joints .
 	
 
 
diff --git a/sections/intro.tex b/sections/intro.tex
index d1c5d71..c063e7d 100644
--- a/sections/intro.tex
+++ b/sections/intro.tex
@@ -2,78 +2,62 @@
 
 \paragraph{Objective Of Thesis:} Design and Implementation of a Model-based Architecture for Cobotic Cells.\\
 
-
 \section {What is Robotics?}
-\cite{1}Robotics is concerned with study of all the machines that can replace human beings in execution of a task, both w.r.t. physical activity and decision-making. Throughout history human beings have constantly attempted to seek substitutes that can mimic their behavior when interacting with surrounding environment.
+Robotics is concerned with the study of all the machines that can replace human beings in the execution of a task, both w.r.t. physical activity and decision-making. Throughout history human beings have constantly attempted to seek substitutes that can mimic their behavior when interacting with the surrounding environment \cite{1}.
 \section {About Robots}
-\cite{2}A robot is a machine programmable by a computer,made to carry out a complex series of actions automatically. Robots can be guided by an external control device or the control may be embedded within. Robots may be constructed resembling human form, but most robots are machines designed to perform a task with no regard to their aesthetics. \cite{3} Robots can be autonomous or semi-autonomous ranges from humanoids to industrial robots, medical operating robots, patient assist robots, dog therapy robots, collectively programmed swarm robots, UAV drones such as General Atomics MQ-1 Predator, and even microscopic nano robots. By mimicking a lifelike appearance or automating movements, it conveys a sense of intelligence . Artificial Intelligence is expected to proliferate in the coming decade, with home robotics and the autonomous car as some main drivers.
+A robot is a machine programmable by a computer, made to carry out a complex series of actions automatically \cite{2}. They can be guided by an external control device or the control may be embedded within, and they may be constructed resembling human form, but most robots are machines designed to perform a task with no regard to their aesthetics. 
+These machines range from autonomous to semi-autonomous, from humanoids to industrial robots, and from medically operating machines to patient assisting robots \cite{3}. By mimicking a lifelike appearance or automating movements, a robot conveys a sense of intelligence. Artificial Intelligence is expected to proliferate in the coming decade, with home robotics and the autonomous car as some main drivers.
 \section {Uses And Applied Fields}
-Robots have a wide range of uses and applied fields. Some uses range from helping fight forest fires,Working alongside humans in manufacturing plants (known as co-bots),Robots that offer companionship to elderly individuals,Surgical assistants,Last-mile package and food order delivery,Autonomous household robots that carry out tasks like vacuuming and mowing the grass,Landmine detectors in war zones.Some of applied fields are -\\
-
-\subsection{Electronics}
-\cite{4}Mass-produced Printed Circuit Boards (PCBs) are almost exclusively manufactured by pick-and-place robots, typically with SCARA manipulators, which remove tiny electronic components from strips or trays, and place them on to PCBs with great accuracy.Such robots can place hundreds of thousands of components per hour, far out-performing a human in speed, accuracy, and reliability.
+Robots have a wide range of uses and applied fields. Some uses range from helping fight forest fires to working alongside humans in manufacturing plants (known as co-bots), ones that offer companionship to elderly individuals, surgical assistants, and autonomous household robots that carry out tasks like vacuuming and mowing the grass. Some of the applied fields are -\\
 
-\subsection{Automated Guided Vehicles (AGVs)}
-\cite{5}Mobile robots, following markers or wires in the floor, or using vision or lasers, are used to transport goods around large facilities, such as warehouses, container ports, or hospitals
+\subsection{Electrical Components Manufacturing}
+Mass-produced Printed Circuit Boards (PCBs) are almost exclusively manufactured by pick-and-place robots, typically with SCARA manipulators, which remove tiny electronic components from strips or trays, and place them on to PCBs with great accuracy. Such robots can place hundreds of thousands of components per hour, far out-performing a human in speed, accuracy, and reliability \cite{4}.
 
-\subsection{Space Probes}
-\cite{6}Almost every unmanned space probe ever launched was a robot.Some were launched in the 1960s with very limited abilities, but their ability to fly and land is an indication of their status as a robot. This includes the Voyager probes and the Galileo probes, among others. 
+\subsection{Automatic Pre-programmed Vehicles (APVs)}
+Mobile robots, following markers or wires in the floor, or using vision or lasers, are used to transport goods around large facilities, such as warehouses, container ports, or hospitals \cite{5}.
 
-\subsection{Cobots}
-\cite{7}Cobots or collaborative robots, are robots intended for direct human robot interaction within a shared space, or where humans and robots are in close proximity. Cobot applications contrast with traditional industrial robot applications in which robots are isolated from human contact.Cobot safety may rely on lightweight construction materials, rounded edges, and inherent limitation of speed and force, or on sensors and software that ensures safe behavior. 
+\subsection{Space Robotic Probes}
+Almost every unmanned space probe ever launched was a robot. Some were launched in the 1960s with very limited abilities, but their ability to fly and land is an indication of their status as a robot. This includes the Voyager probes and the Galileo probes, among others \cite{6}.
 
-\subsection{Construction Robots}
-\cite{8}Construction robots can be separated into three types: traditional robots, robotic arm, and robotic exoskeleton.
+\subsection{Soft-bodied Cobots}
+Robots with silicone bodies and flexible actuators (air muscles, electro active polymers and ferrofluids) look and feel different from robots with rigid skeletons, and can have different behaviors. Soft, flexible (and sometimes even squishy) robots are often designed to mimic the biomechanics of animals and other things found in nature, which is leading to new applications in medicine, care giving, search and rescue, food handling and manufacturing, and scientific exploration \cite{7}.
 
-\subsection{Soft-bodied Robots}
+\subsection{Cobots}
+Cobots, or collaborative robots, are robots intended for direct human robot interaction within a shared space, or where humans and robots are in close proximity. Cobot applications contrast with traditional industrial robot applications in which robots are isolated from human contact. Cobot safety may rely on lightweight construction materials, rounded edges, and inherent limitation of speed and force, or on sensors and software that ensure safe behavior \cite{8}.
 
-\cite{9}Robots with silicone bodies and flexible actuators (air muscles, electro active polymers and ferrofluids) look and feel different from robots with rigid skeletons, and can have different behaviors. Soft, flexible (and sometimes even squishy) robots are often designed to mimic the biomechanics of animals and other things found in nature, which is leading to new applications in medicine, care giving, search and rescue, food handling and manufacturing, and scientific exploration.
+\subsection{Robots in Construction for Homes}
+Construction robots can be used for working in hazardous construction sites \cite{9}.
 
-\subsection{Home Automation For Elderly and Disabled} 
+\subsection{Elderly Assistance}  
+The population is aging in many countries, especially Japan, meaning that there are increasing numbers of elderly people to care for, but relatively fewer young people to care for them.\\Humans make the best carers, but where they are unavailable, robots are gradually being introduced. The FRIEND robot is an example \cite{10}.
 
-\cite{10}Robots used in home automation have developed over time from simple basic robotic assistants, such as the Handy1 through to semi-autonomous robots, such as FRIEND which can assist the elderly and disabled with common tasks. 
-The population is aging in many countries, especially Japan, meaning that there are increasing numbers of elderly people to care for, but relatively fewer young people to care for them.\\Humans make the best carers, but where they are unavailable, robots are gradually being introduced.
-FRIEND is a semi-autonomous robot designed to support disabled and elderly people in their daily life activities, like preparing and serving a meal. FRIEND make it possible for patients who are paraplegic, have muscle diseases or serious paralysis (due to strokes etc.), to perform tasks without help from other people like therapists or nursing staff. 
 \section {Types Of Robots}
-\cite{11}Various types of Robots are described below.
-
-\subsection{Pre-Programmed Robots}
-
-Pre-programmed robots operate in a controlled environment where they do simple, monotonous tasks. An example of a pre-programmed robot would be a mechanical arm on an automotive assembly line. The arm serves one function — to weld a door on, to insert a certain part into the engine, etc. — and it’s job is to perform that task longer, faster and more efficiently than a human.
+Various types of robot classifications are described below \cite{11}.
 
 \subsection{Humanoid Robots}
+Humanoid robots are robots that look like and/or mimic human behavior. These robots usually perform human-like activities (like running, jumping and carrying objects), and are sometimes designed to look like us, even having human faces and expressions. They have soft surfaces and use silicone materials to look, feel and even move like a human. Examples of humanoid robots are Hanson Robotics’ Sophia and Boston Dynamics’ Atlas.
 
-Humanoid robots are robots that look like and/or mimic human behavior. These robots usually perform human-like activities (like running, jumping and carrying objects), and are sometimes designed to look like us, even having human faces and expressions. Two of the most prominent examples of humanoid robots are Hanson Robotics’ Sophia (in the video above) and Boston Dynamics’ Atlas.
-
-\subsection{Autonomous Robots}
-
-Autonomous robots operate independently of human operators. These robots are usually designed to carry out tasks in open environments that do not require human supervision. An example of an autonomous robot would be the Roomba vacuum cleaner, which uses sensors to roam throughout a home freely.
-
-\subsection{Teleoperated Robots}
-
-They are mechanical bots controlled by humans. These robots usually work in extreme geographical conditions, weather, circumstances, etc. Examples of teleoperated robots are the human-controlled submarines used to fix underwater pipe leaks during the BP oil spill or drones used to detect landmines on a battlefield.
-
-\subsection{Augmenting Robots}
-
-Augmenting robots either enhance current human capabilities or replace the capabilities a human may have lost. Some examples of augmenting robots are robotic prosthetic limbs or exoskeletons used to lift hefty weights.
+\subsection{Pre-Programmed Robots}
 
+Pre-programmed robots operate in a controlled environment where they do simple, monotonous tasks. An example of a pre-programmed robot is a mechanical arm on a bike assembly line. The arm serves one function — to weld iron, to insert a certain part into the engine, etc. — and its job is to perform that task longer, faster and more efficiently than a human.
 
 \section {Importance Of Robots In Today's Time}
-In business, time and cost are important factors. Robots are easier and cheaper to work with when compared to humans makes them attractive. Given the risk and danger involved in some tasks, robots are the ideal alternative to human labor.For instance, a robot can move around gas towers, travel space and bring back feedback, all this without exposing human life to any kind of danger. They cannot get tired. It is in human nature to get tired after performing repetitive tasks for a long time. This is different when robots are at work. They can work for a series of days, weeks or even months without getting fatigue. They are programmed to produce accurate results and this makes them ideal for repetitive procedures. Unlike humans, robots do not get tired. Automating a process means ruling out the possibility of having to deal with sick-offs, absenteeism and go-slows that normally delay work. Robots are made from metals and plastics and have three major parts namely; the controllers, sensors and mechanical parts. All these parts are interdependent and the functionality of one depends on the other. Sensors are the parts that inform a robot of its surroundings.  There are robots that are able to tell the amount of pressure that needs to be exerted in order to enable the required grip.Controllers are best described as the robot’s ‘brain’. In many cases, they are run on computer programs. Each robot has specific commands which control all the movements of all the movable parts of a robot.The mechanical parts are the parts that are responsible for moving the robots. They include grippers, gears, pistons and motors. To enable movements, the mechanical parts of robots are normally powered by water, electricity or air.
+In any organization doing monetary work, the most important factors in any situation are time and cost. Generally humans work in these organizations, but robots are easier and cheaper to work with when compared to humans, and this makes them very versatile and attractive in this context. There is also a lot of danger and risk in many tasks that, if handled by humans, can be expensive or hazardous to life, but robots are the attractive alternative to human labor. For example, a robot can move around gas towers, travel space and bring back feedback, all this without human intervention and thus without exposing human life to any kind of danger. Another factor is that humans can get tired but robots cannot. Getting exhausted is characteristic of humans, but even after performing tasks for long periods robots do not get tired. They can work for days, weeks or even months without getting fatigued, and this too in repetitive situations.\\
 
-\newpage
+Robots are machines that are usually programmed to produce accurate results every time and this makes them ideal for critical procedures ranging from medical procedures on humans to assisting in factories, all due to their versatility in both hardware and software systems. Robots do not get frustrated like a human in any adverse situation, and this makes them suitable to perform more reliably. Automating processes means ruling out the possibility of having to deal with sick-offs, absenteeism and go-slows that normally delay work.\\
 
-\section {Expectations From Robots}
+Robots are made from man-made materials, which are usually metals and plastics, and they have three major parts, namely the controllers, sensors and mechanical parts. All these parts are interdependent and the functionality of one depends on the other. Sensors are the parts of the machine programmed to inform a robot of its surrounding information. These machines are able to tell the amount of pressure that needs to be exerted in order to enable the required grip. Secondly, controllers are best described as the robot’s brain power. In most cases these days, robots run on computer programs. Every one of them has specific commands which control all the movements of all the movable parts of the overall machine. The mechanical parts are the parts that are responsible for moving the robots. They include grippers, gears, pistons and motors. To enable movements, the mechanical parts of robots are normally powered by water, electricity or air \cite{12}.
 
-\cite{12}Three expectations from robots which are called Laws for Robotics are :
 
+\section {Expectations From Robots}
 
+The Laws of Robotics, which define three expectations from the field of Robotics, are:\\
 
-‒ A robot must not injure a human being or allow the injury of a human being due to inactivity.\\
+‒ A robot must not cause injury to a human being or allow the injury of a human being due to inactivity.\\
 
-‒ A robot must obey the orders which are given by human beings except of those that conflict with the First Law.\\
+‒ A robot under all circumstances must obey the orders which are given by human beings except those that conflict with the First Law.\\
 
-‒ A robot must protect its existence unless such a protection conflicts with the First or Second Law.
+‒ A robot must protect its existence unless such protection conflicts with the First or Second Law.
 
 
 
diff --git a/sections/motiv.tex b/sections/motiv.tex
index 45c2cee..6cf2226 100644
--- a/sections/motiv.tex
+++ b/sections/motiv.tex
@@ -1,11 +1,15 @@
-\chapter{MOTIVATION}\label{ch:evaluation}
+\chapter{NEED-PLAN-IMPETUS}\label{ch:evaluation}
 
-The topic of thesis is about Design and Implementation of a Model Based Architecture for Cobotic Cells. With the advent of tactile internet regularizing coexistence of robots and humans has become imperative, meaning the so called cobots need a new use case architecture for its unit cell to operate safely alongside humans and real world objects. This architecture is based on multiple models each describing one aspect of use case aiding in functionality. For this the thesis described three models namely World Model ,Application Model and Safety Model which are described using different notations. The World Model is a global model describing the robot and other things in the environment on the whole giving information about the components in real world, this includes one or more humans which can be moving in and out of world zone, obstacles and grasp object which can be a ball or cube. The Application model describes the flow of individual actions of grasping can be performed by cobot according to a motion trajectory to accomplish the given task . Lastly, the Safety Model shows how a cobot achieves goal of not causing any harm to humans or other objects in proximity and how to respond to them to move around them appropriately in cases collisions are detected . \\
+The topic of this thesis is the Design and Implementation of a Model Based Architecture for Cobotic Cells. With the advent of the tactile internet, regularizing the coexistence of robots and humans has become imperative, meaning the so called "Cobots" need a new use case architecture for their unit cell to operate safely alongside humans and real world objects. This architecture is based on multiple models, each describing one aspect of the use case aiding in functionality. For this, the thesis describes three models, namely the World Model, Application Model and Safety Model, which are described using different notations. The World Model is a global model describing the robot and other things in the environment, giving "on the whole" information about the components in the real world; this includes one or more humans which can be moving in and out of the world zone, then some obstacles and a grasp object which can be a ball or cube. The Application Model describes the flow of individual actions of grasping that can be performed by the Cobot according to a motion trajectory to accomplish the given task. Lastly, the Safety Model shows how a Cobot achieves the goal of not causing any harm to humans or other objects in its proximity and how to respond to them by moving around them appropriately in cases where imminent collisions are detected.\\
 
-The real life problem scenario can be described as follows. We want a robot to perform some job and we want to make it to do that with safety i.e. detect and evade obstacles / humans and this can be achieved in two different step cases. The models designed and described address to this task or problem of first to train the robot for performing actions according to a preconceived plan using inbuilt “teaching” feature of robot and then doing it safely in real world conditions. The use case can be understood by seeing a scenario where we can train the robot in a laboratory / ideal condition and give a working functionality to it by giving a design which shows how to perform a task which robot can use to work accordingly . The Franka Panda robot has a teaching mode where we can set a series of poses and grasp actions which can train the robot to perform a task according to a plan and this can be done repetitively by the robot later in scenario 2 which is real world and has added conditions of realism ,for this complex conditions are added to teaching capabilities about how to respond when it detects a human in proximity and obstacles in trajectory paths.
+The real life problem scenario can be described as follows. The robot is expected to perform some job and to do that with safety, i.e. detect and evade obstacles / humans, and this can be achieved in two different step cases. The models designed and described address this task or problem of first training the robot to perform actions according to a preconceived plan using the inbuilt “teaching” feature of the robot and then doing it safely in real world conditions. The use case can be understood by seeing a scenario where we can train the robot in laboratory / ideal conditions and give a working functionality to it by giving a design which shows how to perform a task which the robot can use to work accordingly. The Franka Panda robot has a teaching mode where we can set a series of poses and grasp actions which can train the robot to perform a task according to a plan, and this can be done repetitively by the robot later in scenario 2, which is the real world and has added conditions of realism; for this, complex conditions are added to the teaching capabilities about how to respond when it detects a human in proximity and obstacles in trajectory paths, and in addition the simulation adds real world conditions like adding torque to joints as in the real world.
 \newpage
 
-In scenario one i already know the architecture of robot`s world model and have used its teaching capability to train it to move to a coordinate position and then start a trajectory say X to move a position close to an object that is needed to be say picked up and then it can use its gripper to pickup the object and again move arm to another desired location where it want to drop the object and there it releases the gripper to put that object down thus completing the task. This is smaller use case replication of saying a robot actually moved but here i restrict the idea to only moving arm which is same when it comes to functionality achieved by robot moving itself or moving arm as its fulfilling the same work of detecting things in proximity and achieving the trajectory tasks as well as at same time to do it safely by responding appropriately as per intended use case.So far i talked about training the robot in scenario 1 and now i consider another scenario which is say a real world task where the robot is made to perform the same work it was trained in Scenario 1 but in real life and this means the safety aspect should now be built into the scenario and for this i made a safety architecture which is used by robot by telling it how to respond when seeing an obstacle like a cube or box for example or a human being.
+In scenario one, the architecture of the robot's world model is already known and has thus been used to teach its capability to move physically to a coordinate position and then start a trajectory, for instance X, to move to a position close to an object that needs to be, for instance, picked up; after executing this task it can again move to another desired location where it wants to drop the object, and there it releases the gripper to put that object down, thus completing the task. This sets the tone for designing an Application Model.\\
+
+The one described here is a smaller use case replication of the above case, of assuming a robot actually moved, but here the idea is restricted to only moving the arm, which is the same when it comes to the functionality achieved by the robot moving itself vis-a-vis moving its arm as previously mentioned; so it is fulfilling the same work of detecting things in proximity and achieving the trajectory tasks, and also at the same time doing it safely by responding appropriately as per the intended use case, which sets the tone to design a safety model.\\
+
+So far above description talks about training the robot in scenario 1 and now another scenario is considered which is say a real world task where the robot is made to perform the same work it was trained in Scenario 1 but in real life and this means the safety aspect should now be built into the scenario and for this a safety architecture is constructed which is used by robot, by telling it how to respond when seeing an obstacle like a cube or box for example or a human being.
 
 
 
diff --git a/sections/soa.tex b/sections/soa.tex
index 435d577..bca468c 100644
--- a/sections/soa.tex
+++ b/sections/soa.tex
@@ -2,55 +2,50 @@
 
 \section {Motion Planning And Simulations}
 
-This Robotics project revolves around the idea of Motion Planning. It is about Design and Implementation of a Model Based Architecture for Cobotic Cells. The basic idea is that we have a Robot which we want to cohabit with humans and make it real world intelligent and this means it has to work in real world where we have obstacles, objects and humans. Accidents are imminent and thus the case study is that the Artificial Intelligence has to be built into the Robot. We have a Robot and then one or more humans and then one or more obstacles in real world. The base of Robot is fixed and the arm is moving and we have a gripper which need to do a pickup and release job. The robot has to be programmed to move and not just move but detect obstacles / humans around the Robot. To start with ,the robot uses a motion planner to move arm and pickup an object and then move again according to already planned motion trajectory and release the object at desired location thus completing the task. So far this plan is only about doing the task but sans the idea of any kind of obstacle or human which can cause a hindrance to already planned motion which would thus require an alteration to planned trajectory right at that time instant when sensors detect obstacle and move around the obstacle to reach a coordinate position around the obstacle to a point in pre decided motion plan and then continue motion from there onwards. After the motion planning part i use a simulation software to see if my motion trajectory correction is feasible and working in real life and to see how successful it can be.\\
+This Robotics project revolves around the idea of Motion Planning. It is about Design and Implementation of a Model Based Architecture for Cobotic Cells. The basic idea is that we have a Robot which we want to cohabit with humans and make it real world intelligent and this means it has to work in real world where we have obstacles, objects and humans. Accidents are imminent and thus the case study is that the artificial intelligence has to be built into the Robot. A Robot is present and then one or more humans and then one or more obstacles in real world. The base of Robot is fixed and the arm is moving and there is a gripper which need to do a pickup and release job. The robot has to be programmed to move and not just move but detect obstacles / humans around the Robot continuously.\\
 
-To demonstrate such a concept i show a world which contains the Robot Panda, its arm, obstacle object which can be a ball or cube box and human beings. I am designing a World Model , an Application Model, and Safety model for which i use different diagram forms and they are discussed further.
-
-\newpage
+To start with a fictitious plan, the robot uses a motion planner to move the arm and pick up an object and then move again according to the already planned motion trajectory and release the object at the desired location, thus completing the task. So far this plan is only about doing the task but without the idea of any kind of obstacle or human which can cause a hindrance to the already planned motion, which would thus require an alteration to the planned trajectory right at that time instant when sensors detect an obstacle, moving around the obstacle to reach a coordinate position at a point in the pre decided motion plan and then continuing motion from there onwards. After the motion planning part, a simulation software is utilized to see if the motion trajectory correction is feasible and working in real life and to see how successful it can be.\\
 
+To demonstrate such a concept a world is shown which contains the Robot Panda, its arm, obstacle object which can be a ball or cube box and human beings. The thesis work designs a World Model , an Application Model, and Safety model for which different diagram forms are used and they are discussed further.
 
+\newpage
 \section {Modeling - Explaining Choice of Design Depictions}
-I have tried to find correct diagram depictions for all three intended Diagrams which are World Model,Application Model and lastly Safety Model. Design depictions explained as follows. \\
-
-Unified Modelling Language UML For Class And Object Diagrams For World Model Using Online Visual Paradigm is shown in Fig4.1. displayed above.\\
-
+Thesis work tries to find correct diagram depictions for all three intended Diagrams which are World Model, Application Model and lastly Safety Model.
+\begin{enumerate}[label=(\Alph*)]
+\item WORLD MODEL\\
+Unified Modelling Language (UML) class and object diagrams for the World Model, created using the online Visual Paradigm tool, are shown in Fig 4.1 and Fig 4.2 displayed above. The UML class model is used to derive the objects depicted in the UML object model \cite{17}.
 \begin{figure}
 	\centering
-	\includegraphics[width=1.0\linewidth]{../app2}
-	\caption{  WORLD MODEL UML CLASS DIAGRAM}
+	\includegraphics[width=0.9\linewidth]{../app2}
+	\caption{  World Model UML Class Diagram}
 	\label{fig:app2}
 \end{figure}
-
-
-This World Model Class Diagram has been designed to contain ten classes . The main parent classes are World Object and World Class . The classes Robot and Abstract Object inherit from one class and at same time are associated with another class . \\
-
+This World Model Class Diagram has been designed to contain ten classes. The main parent classes are World Object and World, present on the top most level. The classes Robot and Abstract Object inherit from one class and at the same time are aggregated with another class. The relations of Association and Aggregation are used where classes need to use data variables from other classes and where a class variable must depend on other class variables for it to exist, respectively. Aggregation is used where, if the class is not involved, its child classes cannot be used. Association is used where one class is related to another just to be able to use the other's variables. Both relations use pointers. 
 \begin{figure}
 	\centering
-	\includegraphics[width=1.0\linewidth]{../app3}
-	\caption{  WORLD MODEL UML OBJECT DIAGRAM}
-	\label{fig:app3}	
-	
+	\includegraphics[width=0.9\linewidth]{../app3}
+	\caption{  World Model UML Object Diagram}
+	\label{fig:app3}		
 \end{figure}
-
-Fig4.2.The UML class model is used to derive the objects and depict in the UML object model.
-
 \newpage
+\item APPLICATION MODEL – BUSINESS PROCESS MODELING NOTATION USING  \\  
+ MODELIO \\
+ 
+Business Process Modelling Notation (BPMN) using Modelio was chosen for the Application Model, as depicted in Figure 4.3 \cite{18}.\\
 
-APPLICATION MODEL – BUSINESS PROCESS MODELING NOTATION USING MODELIO\\
+This was found to be a good choice to show the application process, as it shows cells which depict each component in the world model diagram and then allows depicting the relationship and connection between their components, showing their flow with a comprehensive and logical consistency among cells. It uses start and end event states and then “if” conditions as well as flow lines with process events and intermediate events to other components of the world diagram to construct the Application Model. In detail, the components are described as - \\
 
-Business Process Modelling Notation For  Application Model Using Modelio,Business Process Modelling Notation for designing Application Model was chosen .Figure 4.3.
-This was found to be good choice to show Application process as this shows cells which depict each component in the world  and then allowed to depict the relationship and connection between them and showing their flow which have a comprehensive and logical consistency among cells. It uses start and end event states and then “if” conditions as well as flow lines to other components of world.\\
- 
-\begin{figure}
+Events, Activities and Gateways. Objects are connected using Sequence Flows , Message Flows and Associations. Each segment is known as frames.
+
+\begin{figure} 
 	\centering
-	\includegraphics[width=0.9\linewidth]{../app}
+	\includegraphics[width=0.8\linewidth]{../app}
 	\caption{BPMN APPLICATION MODEL DIAGRAM}
 	\label{fig:app}
 \end{figure}
-
 \newpage
 
-SAFETY MODEL DEPICTION USING MODELIO UML STATE CHART \\
+\item SAFETY MODEL DEPICTION USING UML STATE CHART \\
 
 \begin{figure}
 	\centering
@@ -59,28 +54,32 @@ SAFETY MODEL DEPICTION USING MODELIO UML STATE CHART \\
 	\label{fig:app4}
 \end{figure}
 
+In Fig.~4.4, the start and end events denote the starting and end of the process. The transition T1 is about human presence. The robot motion begins according to the motion already planned using MoveIt, but if human presence is detected by the sensors, it calls MoveIt for a new trajectory and proceeds with the motion. It then once again checks whether a human is detected using an if condition and, if so, returns to the Human Present condition from this state; once again a new motion is planned by MoveIt until a state is achieved where a human is absent, and a final transition T2 is executed which proceeds to the end event. This model can be extended in more detail in the future using this concept along with a different notation that can make the cases more detailed and thus more extensive. MAPE-K loops can also be used to denote the safety of a system, and in addition computer-generated graphics can be used to depict the safety model of a system; this enables the research to not be restricted to just the BPMN notation for safety models of a system\cite{18}.\\ 
 
-Refer Fig.4.4 
-The start and end events denote the process starting and end. The transition T1 is about human presence. The robot motion begins according to motion already planned using MoveIT but then if the human presence is detected by sensor it return to Human Present condition from this state once again a new motion is planned by MoveIT until a state Is achieved where a human is absent and a final transition T2 is executed which proceed to the end event finally.  
+\begin{figure}
+	\centering
+	\includegraphics[width=0.6\linewidth]{../3-Figure4-1}
+	\caption{Paper: On Making Robots Understand Safety. Image source: https://journals.sagepub.com/doi/pdf/10.1177/0278364912462256}
+	\label{fig:3-figure4-1}
+\end{figure}
+
+Fig.~4.5: There has been extensive work done to incorporate safety into real-world robots, particularly by Sami Haddadin. Robots have been built with technology ranging from making the robot understand safety, i.e.\ by making it softer in approach when operating, to preventing any physical collision by embedding injury knowledge into the controls. The robot surfaces are made softer and the force is reduced when in proximity to collision objects, up to a level where the robot can affirmatively detect the kind of object in proximity and classify it as a seriously or not so seriously unsafe object. Extensive testing has been done on injuring pig skin\cite{19}.
+\end{enumerate}
 
 \newpage
 
 \section {Tools Used }
 \subsection{Setup Environment Using ROS To Run Services ,Motion Planning In RVIZ Using MOVEIT ,GAZEBO For Simulations}
-\footnote{https://erdalpekel.de/?p=55 , https://github.com/frankaemika/ , https://moveit.ros.org/ } ROS is an opensource robot operating system. ROS is not an operating system in the traditional sense of process management and scheduling; rather, it provides a structured communications layer above the host operating systems of a heterogeneous compute cluster. ROS relates to existing robot software frameworks, and briefly overview some of the available application software which uses ROS. Writing software for robots is difficult, particularly as the scale and scope of robotics continues to grow. Different types of robots can have wildly varying hardware, making code reuse nontrivial. On top of this, the sheer size of the required code can be daunting, as it must contain a deep stack starting from driver-level software and continuing up through perception, abstract reasoning, and beyond. Since the required breadth of expertise is well beyond the capabilities of any single researcher, robotics software architectures must also support large-scale software integration efforts. To meet these challenges, many robotics researchers, including ourselves, have previously created a wide variety of frameworks to manage complexity and facilitate rapid prototyping of software for experiments, resulting in the many robotic software systems currently used in academia and industry . Each of these frameworks was designed for a particular purpose, perhaps in response to perceived weaknesses of other available frameworks, or to place emphasis on aspects which were seen as most important in the design process. ROS, the framework described in this paper, is also the product of tradeoffs and prioritizations made during its design cycle. We believe its emphasis on large-scale integrative robotics research will be useful in a wide variety of situations as robotic systems grow ever more complex. \\
-
-\footnote{ROS:  an  open-source  Robot  Operating  System } Motion Planning RviZ/MoveIT 
-I used MoveIT for motion planning as this software lets us alter many different parameters of the robotic components and helps us to create case studies in a world environment for Robot Panda. Here we set a series of joints and poses and then set a trajectory for motion planning which we then use to run in a real world simulator. Its main purpose was to introduce an obstacle which is a cube box or which can be a human being and the motion plan is about moving the robot arm around the obstacle to reach a position which was decided earlier in the motion plan.\\
 
-Gazebo for simulations
-I used Gazebo which is real world simulator to run the motion plan from MoveIT . This has let me see if the real world simulation is possible for the conceived motion plan and trajectory . Gazebo also has additional features which can add real time parameters to its simulation like altering torque of joints to see how robot reacts in real world.
+\begin{enumerate}[label=(\Alph*)]
+\item ROS \footnote{https://erdalpekel.de/?p=55 , https://github.com/frankaemika/ , https://moveit.ros.org/ }\\
+ROS is an open-source robot operating system. ROS is not an operating system in the traditional sense, as it does not provide the regular functions of an OS such as process management and scheduling; instead it provides a different set of services, acting like a structured communications layer above the host operating systems. ROS is associated with existing robot frameworks, with a brief look at the available application software which uses ROS. As robotics is a wide and continuously growing field of research, generating code for ROS is not easy. There are different categories of robots available with a high degree of variation in hardware, which does not enable programmers to reuse code or develop on modules. In addition, the total amount of code needed is too much for regular programmers, as it needs a deep stack starting from driver-level software and continuing up, and it also needs abstract reasoning, and more. The required breadth and width of expertise is far more than the skills of any single researcher, and robotics software architectures must also be able to be integrated with large-scale software. To address these problems and make life easier for a regular programmer, many robotics researchers have constructed a huge number of frameworks to handle complexity and support rapid prototyping of software for experiments, thus enabling research in industry and academia. Each of the frameworks was made with a purpose in mind, maybe as a response to perceived weaknesses of other available frameworks, or to place importance on dimensions which were seen as most important in the design process. ROS, the framework, was designed not without tradeoffs and prioritizations made during its design cycle, which were essential in the interest of practical uses. It is still thought that these tradeoffs will serve the purposes of large-scale integrative robotics research well in a wide variety of uses and cases as robotic systems grow ever more complex. \\
+\item RVIZ / MoveIT \footnote{ROS:  an  open-source  Robot  Operating  System } \\
+The RViz/MoveIt motion planner was used for motion planning, as this software lets us alter many different parameters of the robotic components and helps to create case studies in a world environment for the Panda robot. Here a series of joints and poses is set, and then a trajectory for motion planning, which is then used to run in a real-world simulator. Its main purpose was to introduce an obstacle, which is a cube box or which can be a human being, and the motion plan is about moving the robot arm around the obstacle to reach a position which was decided earlier in the motion plan.\\
+\item Gazebo \footnote{http://gazebosim.org/}
+The Gazebo simulator, which is a real-world simulator, was used to run the motion plan from MoveIt. This lets us see if a real-world simulation is possible for the conceived motion plan and trajectory. Gazebo also has additional features which can add real-time parameters to its simulation, like altering the torque of joints to see how the robot reacts in the real world.
+\end{enumerate}
 
 \newpage
 
-\section {Repos Used}
-\subsection{ERDAL`S Repositories And How They Are Used With Tools And Running Simulations}
-
-Erdal has provided tutorial to connect MoveIt to a simulated Panda Robot and for this they have provided three repositories namely frankaros , pandamoveitconfig and pandasimulation. They help to build the workspace and use MoveIt and other simulation software to motion plan and set and modify parameters for it. It thus helps us to connect the robot in MoveIT to Gazebo simulation to see how motion plan fairs in real life conditions.
-
-
 
diff --git a/task_description/task_de.tex b/task_description/task_de.tex
index 149802d..9aa91c1 100644
--- a/task_description/task_de.tex
+++ b/task_description/task_de.tex
@@ -1,5 +1,5 @@
-\RequirePackage[ngerman=ngerman-x-latest]{hyphsubst}
-\documentclass[ngerman,twoside]{tudscrreprt}
+\RequirePackage[nenglish=nenglish-x-latest]{hyphsubst}
+\documentclass[nenglish,twoside]{tudscrreprt}
 %\usepackage{selinput}\SelectInputMappings{adieresis={ä},germandbls={ß}}
 %\usepackage[T1]{fontenc}
 \usepackage{babel}
diff --git a/thesis.tex b/thesis.tex
index 91efab9..07cd45f 100644
--- a/thesis.tex
+++ b/thesis.tex
@@ -111,13 +111,17 @@
     \and Dipl.-Inf. Sebestian Ebert%
 }
 \professor{Prof. Dr. rer. nat habil. Uwe Aßmann}
-\date{19.10.2020}
+
+\date{23.10.2020}
+
 \maketitle
 \newpage
 
 \tableofcontents
 
 
+
+
 \input{sections/intro}
 \input{sections/motiv.tex}
 \input{sections/back.tex}
@@ -126,10 +130,8 @@
 \input{sections/imp.tex}
 \input{sections/eval.tex}
 
-\printbibliography[heading=bibintoc]\label{sec:bibliography}%
-
-
 
+\printbibliography[heading=bibintoc]\label{sec:bibliography}% 
 
 
 \confirmation
-- 
GitLab