diff --git a/doc/source/_static/dpf_operators.html b/doc/source/_static/dpf_operators.html
index d9cf9e841a..683e9ced46 100644
--- a/doc/source/_static/dpf_operators.html
+++ b/doc/source/_static/dpf_operators.html
@@ -2070,7 +2070,7 @@
Configurating operators
0 0 0
0 0 0
0 0 0
-">Example of workflows and their scripts
math: imaginary part
Inputs
Outputs
Configurations
Scripting
math: amplitude (fields container)
Inputs
Outputs
Configurations
Scripting
metadata: mesh support provider
Inputs
Outputs
Configurations
Scripting
result: beam axial stress (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: unit convert (fields container)
Inputs
Outputs
Configurations
Scripting
result: element orientations X
Inputs
Outputs
Configurations
Scripting
math: norm (fields container)
Inputs
Outputs
Configurations
Scripting
mapping: prepare mapping workflow
Inputs
Outputs
Configurations
Scripting
math: sqrt (fields container)
Inputs
Outputs
Configurations
Scripting
math: conjugate
Inputs
Outputs
Configurations
Scripting
utility: html doc
Inputs
Outputs
Configurations
Scripting
math: real part
Inputs
Outputs
Configurations
Scripting
result: current density
Inputs
Outputs
Configurations
Scripting
math: multiply (complex fields)
Inputs
Outputs
Configurations
Scripting
utility: merge result infos
Inputs
Outputs
Configurations
Scripting
result: cyclic kinetic energy
Inputs
Outputs
Configurations
Scripting
result: global total mass (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: unit convert
Inputs
Outputs
Configurations
Scripting
math: norm (field)
Inputs
Outputs
Configurations
Scripting
utility: make label space
Inputs
Outputs
Configurations
Scripting
math: sqrt (field)
Inputs
Outputs
Configurations
Scripting
math: accumulate min over label
Inputs
Outputs
Configurations
Scripting
result: y plus (y+)
Inputs
Outputs
Configurations
Scripting
math: +
Inputs
Outputs
Configurations
Scripting
server: grpc shutdown server
Inputs
Outputs
Configurations
Scripting
result: magnetic scalar potential
Inputs
Outputs
Configurations
Scripting
min_max: min max over time
Inputs
Outputs
Configurations
Scripting
math: time freq interpolation
Inputs
Outputs
Configurations
Scripting
math: + (fields container)
Inputs
Outputs
Configurations
Scripting
math: sin (fields container)
Inputs
Outputs
Configurations
Scripting
math: + constant (field)
Inputs
Outputs
Configurations
Scripting
math: / (component-wise field)
Inputs
Outputs
Configurations
Scripting
math: + constant (fields container)
Inputs
Outputs
Configurations
Scripting
math: cross product (fields container)
Inputs
Outputs
Configurations
Scripting
result: cyclic strain energy
Inputs
Outputs
Configurations
Scripting
invariant: scalar invariants (fields container)
Inputs
Outputs
Configurations
Scripting
mapping: find reduced coordinates
Inputs
Outputs
Configurations
Scripting
scoping: rescope property field
Inputs
Outputs
Configurations
Scripting
result: plastic strain principal 1
Inputs
Outputs
Configurations
Scripting
math: -
Inputs
Outputs
Configurations
Scripting
math: total sum
Inputs
Outputs
Configurations
Scripting
math: - (fields container)
Inputs
Outputs
Configurations
Scripting
scoping: intersect scopings
Inputs
Outputs
Configurations
Scripting
math: ^ (field)
Inputs
Outputs
Configurations
Scripting
scoping: elements in mesh
Inputs
Outputs
Configurations
Scripting
math: scale (field)
Inputs
Outputs
Configurations
Scripting
result: enthalpy
Inputs
Outputs
Configurations
Scripting
math: ^ (fields container)
Inputs
Outputs
Configurations
Scripting
result: global eroded internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: scale fields container
Inputs
Outputs
Configurations
Scripting
math: sweeping phase
Inputs
Outputs
Configurations
Scripting
math: centroid
Inputs
Outputs
Configurations
Scripting
math: sweeping phase (fields container)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (field)
Inputs
Outputs
Configurations
Scripting
math: centroid (fields container)
Inputs
Outputs
Configurations
Scripting
math: ^2 (field)
Inputs
Outputs
Configurations
Scripting
utility: remove unnecessary labels
Inputs
Outputs
Configurations
Scripting
result: velocity Z
Inputs
Outputs
Configurations
Scripting
result: reaction force Z
Inputs
Outputs
Configurations
Scripting
math: sin (field)
Inputs
Outputs
Configurations
Scripting
math: cos (field)
Inputs
Outputs
Configurations
Scripting
math: cos (fields container)
Inputs
Outputs
Configurations
Scripting
logic: ascending sort
Inputs
Outputs
Configurations
Scripting
result: initial coordinates (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: convert to fields container
Inputs
Outputs
Configurations
Scripting
math: linear combination
Inputs
Outputs
Configurations
Scripting
math: ^2 (fields container)
Inputs
Outputs
Configurations
Scripting
result: mean static pressure
Inputs
Outputs
Configurations
Scripting
math: exp (field)
Inputs
Outputs
Configurations
Scripting
math: exp (fields container)
Inputs
Outputs
Configurations
Scripting
math: * (component-wise field)
Inputs
Outputs
Configurations
Scripting
result: stress max_shear
Inputs
Outputs
Configurations
Scripting
result: euler nodes
Inputs
Outputs
Configurations
Scripting
math: * (component-wise field) (fields container)
Inputs
Outputs
Configurations
Scripting
result: beam TR shear stress (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: num surface status changes
Inputs
Outputs
Configurations
Scripting
math: ln (field)
Inputs
Outputs
Configurations
Scripting
mesh: mesh to pyvista
Inputs
Outputs
Configurations
Scripting
math: ln (fields container)
Inputs
Outputs
Configurations
Scripting
invariant: scalar invariants (field)
Inputs
Outputs
Configurations
Scripting
math: cross product
Inputs
Outputs
Configurations
Scripting
filter: high pass (timefreq)
Inputs
Outputs
Configurations
Scripting
math: / (component-wise fields container)
Inputs
Outputs
Configurations
Scripting
result: global sliding interface energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: kronecker product
Inputs
Outputs
Configurations
Scripting
math: modulus (fields container)
Inputs
Outputs
Configurations
Scripting
result: joint relative angular velocity
Inputs
Outputs
Configurations
Scripting
math: dot (complex fields)
Inputs
Outputs
Configurations
Scripting
result: gasket stress XZ
Inputs
Outputs
Configurations
Scripting
math: / (complex fields)
Inputs
Outputs
Configurations
Scripting
utility: unitary field
Inputs
Outputs
Configurations
Scripting
utility: server path
Inputs
Outputs
Configurations
Scripting
result: beam axial force (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: derivate (complex fields)
Inputs
Outputs
Configurations
Scripting
math: polar to complex fields
Inputs
Outputs
Configurations
Scripting
utility: merge data tree
Inputs
Outputs
Configurations
Scripting
math: dot (fields container)
Inputs
Outputs
Configurations
Scripting
result: nodal moment
Inputs
Outputs
Configurations
Scripting
math: phase (field)
Inputs
Outputs
Configurations
Scripting
math: phase (fields container)
Inputs
Outputs
Configurations
Scripting
math: modulus (field)
Inputs
Outputs
Configurations
Scripting
result: elemental mass
Inputs
Outputs
Configurations
Scripting
result: heat flux
Inputs
Outputs
Configurations
Scripting
math: total sum (fields container)
Inputs
Outputs
Configurations
Scripting
result: co-energy
Inputs
Outputs
Configurations
Scripting
math: dot
Inputs
Outputs
Configurations
Scripting
math: outer product
Inputs
Outputs
Configurations
Scripting
math: overall dot
Inputs
Outputs
Configurations
Scripting
math: relative error
Inputs
Outputs
Configurations
Scripting
result: velocity Y
Inputs
Outputs
Configurations
Scripting
result: reaction force Y
Inputs
Outputs
Configurations
Scripting
result: global velocity (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: superficial velocity
Inputs
Outputs
Configurations
Scripting
math: absolute value by component (field)
Inputs
Outputs
Configurations
Scripting
result: incremental energy
Inputs
Outputs
Configurations
Scripting
result: thermal strain
Inputs
Outputs
Configurations
Scripting
result: stiffness matrix energy
Inputs
Outputs
Configurations
Scripting
math: absolute value by component (fields container)
Inputs
Outputs
Configurations
Scripting
result: total temperature
Inputs
Outputs
Configurations
Scripting
result: cyclic equivalent mass
Inputs
Outputs
Configurations
Scripting
result: acceleration Y
Inputs
Outputs
Configurations
Scripting
utility: delegate to operator
Inputs
Outputs
Configurations
Scripting
logic: component selector (fields container)
Inputs
Outputs
Configurations
Scripting
logic: component selector (field)
Inputs
Outputs
Configurations
Scripting
scoping: on property
Inputs
Outputs
Configurations
Scripting
result: stress intensity
Inputs
Outputs
Configurations
Scripting
min_max: over field
Inputs
Outputs
Configurations
Scripting
result: transient rayleigh integration
Inputs
Outputs
Configurations
Scripting
logic: same property fields?
Inputs
Outputs
Configurations
Scripting
logic: elementary data selector (fields container)
Inputs
Outputs
Configurations
Scripting
utility: convert to scoping
Inputs
Outputs
Configurations
Scripting
logic: elementary data selector (field)
Inputs
Outputs
Configurations
Scripting
utility: change location
Inputs
Outputs
Configurations
Scripting
mesh: node coordinates
Inputs
Outputs
Configurations
Scripting
mesh: stl export
Inputs
Outputs
Configurations
Scripting
utility: bind support
Inputs
Outputs
Configurations
Scripting
utility: convert to meshes container
Inputs
Outputs
Configurations
Scripting
result: beam torsional moment (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: equivalent stress parameter
Inputs
Outputs
Configurations
Scripting
utility: convert to field
Inputs
Outputs
Configurations
Scripting
result: beam axial total strain (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: set property
Inputs
Outputs
Configurations
Scripting
utility: forward field
Inputs
Outputs
Configurations
Scripting
utility: forward fields container
Inputs
Outputs
Configurations
Scripting
result: electric flux density
Inputs
Outputs
Configurations
Scripting
geo: integrate over elements
Inputs
Outputs
Configurations
Scripting
result: plastic strain principal 2
Inputs
Outputs
Configurations
Scripting
utility: forward meshes container
Inputs
Outputs
Configurations
Scripting
result: compute total strain X
Example of workflows and their scripts
math: imaginary part
Inputs
Outputs
Configurations
Scripting
math: amplitude (fields container)
Inputs
Outputs
Configurations
Scripting
metadata: mesh support provider
Inputs
Outputs
Configurations
Scripting
result: beam axial stress (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: unit convert (fields container)
Inputs
Outputs
Configurations
Scripting
result: element orientations X
Inputs
Outputs
Configurations
Scripting
math: norm (fields container)
Inputs
Outputs
Configurations
Scripting
mapping: prepare mapping workflow
Inputs
Outputs
Configurations
Scripting
math: sqrt (fields container)
Inputs
Outputs
Configurations
Scripting
math: conjugate
Inputs
Outputs
Configurations
Scripting
utility: html doc
Inputs
Outputs
Configurations
Scripting
math: real part
Inputs
Outputs
Configurations
Scripting
result: current density
Inputs
Outputs
Configurations
Scripting
math: multiply (complex fields)
Inputs
Outputs
Configurations
Scripting
utility: merge result infos
Inputs
Outputs
Configurations
Scripting
result: cyclic kinetic energy
Inputs
Outputs
Configurations
Scripting
result: global total mass (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: unit convert
Inputs
Outputs
Configurations
Scripting
math: norm (field)
Inputs
Outputs
Configurations
Scripting
utility: make label space
Inputs
Outputs
Configurations
Scripting
math: sqrt (field)
Inputs
Outputs
Configurations
Scripting
result: y plus (y+)
Inputs
Outputs
Configurations
Scripting
math: accumulate min over label
Inputs
Outputs
Configurations
Scripting
math: +
Inputs
Outputs
Configurations
Scripting
server: grpc shutdown server
Inputs
Outputs
Configurations
Scripting
result: magnetic scalar potential
Inputs
Outputs
Configurations
Scripting
min_max: min max over time
Inputs
Outputs
Configurations
Scripting
math: time freq interpolation
Inputs
Outputs
Configurations
Scripting
math: + (fields container)
Inputs
Outputs
Configurations
Scripting
math: sin (fields container)
Inputs
Outputs
Configurations
Scripting
math: + constant (field)
Inputs
Outputs
Configurations
Scripting
math: / (component-wise field)
Inputs
Outputs
Configurations
Scripting
math: + constant (fields container)
Inputs
Outputs
Configurations
Scripting
math: cross product (fields container)
Inputs
Outputs
Configurations
Scripting
result: cyclic strain energy
Inputs
Outputs
Configurations
Scripting
invariant: scalar invariants (fields container)
Inputs
Outputs
Configurations
Scripting
mapping: find reduced coordinates
Inputs
Outputs
Configurations
Scripting
scoping: rescope property field
Inputs
Outputs
Configurations
Scripting
result: plastic strain principal 1
Inputs
Outputs
Configurations
Scripting
math: -
Inputs
Outputs
Configurations
Scripting
math: total sum
Inputs
Outputs
Configurations
Scripting
math: - (fields container)
Inputs
Outputs
Configurations
Scripting
scoping: intersect scopings
Inputs
Outputs
Configurations
Scripting
math: ^ (field)
Inputs
Outputs
Configurations
Scripting
scoping: elements in mesh
Inputs
Outputs
Configurations
Scripting
math: scale (field)
Inputs
Outputs
Configurations
Scripting
result: enthalpy
Inputs
Outputs
Configurations
Scripting
math: ^ (fields container)
Inputs
Outputs
Configurations
Scripting
result: global eroded internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: scale fields container
Inputs
Outputs
Configurations
Scripting
math: sweeping phase
Inputs
Outputs
Configurations
Scripting
math: centroid
Inputs
Outputs
Configurations
Scripting
math: sweeping phase (fields container)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (field)
Inputs
Outputs
Configurations
Scripting
math: centroid (fields container)
Inputs
Outputs
Configurations
Scripting
math: ^2 (field)
Inputs
Outputs
Configurations
Scripting
utility: remove unnecessary labels
Inputs
Outputs
Configurations
Scripting
result: velocity Z
Inputs
Outputs
Configurations
Scripting
result: reaction force Z
Inputs
Outputs
Configurations
Scripting
math: sin (field)
Inputs
Outputs
Configurations
Scripting
math: cos (field)
Inputs
Outputs
Configurations
Scripting
math: cos (fields container)
Inputs
Outputs
Configurations
Scripting
logic: ascending sort
Inputs
Outputs
Configurations
Scripting
result: initial coordinates (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: convert to fields container
Inputs
Outputs
Configurations
Scripting
math: linear combination
Inputs
Outputs
Configurations
Scripting
math: ^2 (fields container)
Inputs
Outputs
Configurations
Scripting
result: mean static pressure
Inputs
Outputs
Configurations
Scripting
math: exp (field)
Inputs
Outputs
Configurations
Scripting
math: exp (fields container)
Inputs
Outputs
Configurations
Scripting
math: * (component-wise field)
Inputs
Outputs
Configurations
Scripting
result: stress max_shear
Inputs
Outputs
Configurations
Scripting
result: euler nodes
Inputs
Outputs
Configurations
Scripting
math: * (component-wise field) (fields container)
Inputs
Outputs
Configurations
Scripting
result: beam TR shear stress (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: num surface status changes
Inputs
Outputs
Configurations
Scripting
math: ln (field)
Inputs
Outputs
Configurations
Scripting
mesh: mesh to pyvista
Inputs
Outputs
Configurations
Scripting
math: ln (fields container)
Inputs
Outputs
Configurations
Scripting
invariant: scalar invariants (field)
Inputs
Outputs
Configurations
Scripting
math: cross product
Inputs
Outputs
Configurations
Scripting
filter: high pass (timefreq)
Inputs
Outputs
Configurations
Scripting
math: / (component-wise fields container)
Inputs
Outputs
Configurations
Scripting
result: global sliding interface energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: kronecker product
Inputs
Outputs
Configurations
Scripting
math: modulus (fields container)
Inputs
Outputs
Configurations
Scripting
result: joint relative angular velocity
Inputs
Outputs
Configurations
Scripting
math: dot (complex fields)
Inputs
Outputs
Configurations
Scripting
result: gasket stress XZ
Inputs
Outputs
Configurations
Scripting
math: / (complex fields)
Inputs
Outputs
Configurations
Scripting
utility: unitary field
Inputs
Outputs
Configurations
Scripting
utility: server path
Inputs
Outputs
Configurations
Scripting
result: beam axial force (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: derivate (complex fields)
Inputs
Outputs
Configurations
Scripting
math: polar to complex fields
Inputs
Outputs
Configurations
Scripting
utility: merge data tree
Inputs
Outputs
Configurations
Scripting
math: dot (fields container)
Inputs
Outputs
Configurations
Scripting
result: nodal moment
Inputs
Outputs
Configurations
Scripting
math: phase (field)
Inputs
Outputs
Configurations
Scripting
math: phase (fields container)
Inputs
Outputs
Configurations
Scripting
math: modulus (field)
Inputs
Outputs
Configurations
Scripting
result: elemental mass
Inputs
Outputs
Configurations
Scripting
result: heat flux
Inputs
Outputs
Configurations
Scripting
math: total sum (fields container)
Inputs
Outputs
Configurations
Scripting
result: co-energy
Inputs
Outputs
Configurations
Scripting
math: dot
Inputs
Outputs
Configurations
Scripting
math: outer product
Inputs
Outputs
Configurations
Scripting
math: overall dot
Inputs
Outputs
Configurations
Scripting
math: relative error
Inputs
Outputs
Configurations
Scripting
result: velocity Y
Inputs
Outputs
Configurations
Scripting
result: reaction force Y
Inputs
Outputs
Configurations
Scripting
result: global velocity (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: superficial velocity
Inputs
Outputs
Configurations
Scripting
math: absolute value by component (field)
Inputs
Outputs
Configurations
Scripting
result: incremental energy
Inputs
Outputs
Configurations
Scripting
result: thermal strain
Inputs
Outputs
Configurations
Scripting
result: stiffness matrix energy
Inputs
Outputs
Configurations
Scripting
math: absolute value by component (fields container)
Inputs
Outputs
Configurations
Scripting
metadata: element types provider
Inputs
Outputs
Configurations
Scripting
result: total temperature
Inputs
Outputs
Configurations
Scripting
result: cyclic equivalent mass
Inputs
Outputs
Configurations
Scripting
result: acceleration Y
Inputs
Outputs
Configurations
Scripting
utility: delegate to operator
Inputs
Outputs
Configurations
Scripting
logic: component selector (fields container)
Inputs
Outputs
Configurations
Scripting
logic: component selector (field)
Inputs
Outputs
Configurations
Scripting
scoping: on property
Inputs
Outputs
Configurations
Scripting
result: stress intensity
Inputs
Outputs
Configurations
Scripting
logic: same property fields?
Inputs
Outputs
Configurations
Scripting
logic: elementary data selector (fields container)
Inputs
Outputs
Configurations
Scripting
utility: convert to scoping
Inputs
Outputs
Configurations
Scripting
logic: elementary data selector (field)
Inputs
Outputs
Configurations
Scripting
utility: change location
Inputs
Outputs
Configurations
Scripting
mesh: node coordinates
Inputs
Outputs
Configurations
Scripting
mesh: stl export
Inputs
Outputs
Configurations
Scripting
utility: bind support
Inputs
Outputs
Configurations
Scripting
utility: convert to meshes container
Inputs
Outputs
Configurations
Scripting
result: beam torsional moment (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: equivalent stress parameter
Inputs
Outputs
Configurations
Scripting
utility: convert to field
Inputs
Outputs
Configurations
Scripting
result: beam axial total strain (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: set property
Inputs
Outputs
Configurations
Scripting
result: electric flux density Y
Inputs
Outputs
Configurations
Scripting
utility: forward field
Inputs
Outputs
Configurations
Scripting
utility: forward fields container
Inputs
Outputs
Configurations
Scripting
result: electric flux density
Inputs
Outputs
Configurations
Scripting
geo: integrate over elements
Inputs
Outputs
Configurations
Scripting
result: plastic strain principal 2
Inputs
Outputs
Configurations
Scripting
utility: forward meshes container
Inputs
Outputs
Configurations
Scripting
result: compute total strain X
Configurating operators
Get the XX normal component (00 component).">Inputs
Outputs
Configurations
Scripting
utility: forward
Inputs
Outputs
Configurations
Scripting
utility: incremental meshes container
Inputs
Outputs
Configurations
Scripting
utility: txt file to dpf
Inputs
Outputs
Configurations
Scripting
utility: bind support (fields container)
Inputs
Outputs
Configurations
Scripting
utility: fields container get attribute
Inputs
Outputs
Configurations
Scripting
result: thermal strain XZ
Inputs
Outputs
Configurations
Scripting
utility: assemble scalars to vector
Inputs
Outputs
Configurations
Scripting
result: global eroded hourglass energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: assemble scalars to vector fc
Inputs
Outputs
Configurations
Scripting
utility: assemble scalars to matrix
Inputs
Outputs
Configurations
Scripting
math: make one on component
Inputs
Outputs
Configurations
Scripting
mesh: from scopings
Inputs
Outputs
Configurations
Scripting
utility: assemble scalars to matrix fc
Inputs
Outputs
Configurations
Scripting
result: pres to field
Inputs
Outputs
Configurations
Scripting
result: part internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: part momentum (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: compute invariant terms rbd
Inputs
Outputs
Configurations
Scripting
utility: default value
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal elemental (fields container)
Inputs
Outputs
Configurations
Scripting
result: rms velocity
Inputs
Outputs
Configurations
Scripting
result: acceleration X
Inputs
Outputs
Configurations
Scripting
result: poynting vector
Inputs
Outputs
Configurations
Scripting
result: total strain (LSDyna)
Inputs
Outputs
Configurations
Scripting
averaging: elemental difference (fields container)
Inputs
Outputs
Configurations
Scripting
utility: compute time scoping
Inputs
Outputs
Configurations
Scripting
result: static pressure
Inputs
Outputs
Configurations
Scripting
result: elastic strain
Inputs
Outputs
Configurations
Scripting
math: window bartlett (fields container)
Inputs
Outputs
Configurations
Scripting
result: turbulent viscosity
Inputs
Outputs
Configurations
Scripting
utility: python generator
Inputs
Outputs
Configurations
Scripting
utility: make overall
Inputs
Outputs
Configurations
Scripting
geo: elements volume
Inputs
Outputs
Configurations
Scripting
result: pressure
Inputs
Outputs
Configurations
Scripting
result: stress
Inputs
Outputs
Configurations
Scripting
result: stress X
Inputs
Outputs
Configurations
Scripting
result: gasket thermal closure XZ
Inputs
Outputs
Configurations
Scripting
result: stress Y
Inputs
Outputs
Configurations
Scripting
result: stress Z
Inputs
Outputs
Configurations
Scripting
result: gasket thermal closure XY
Inputs
Outputs
Configurations
Scripting
result: stress XY
Inputs
Outputs
Configurations
Scripting
compression: kMeans clustering
Inputs
Outputs
Configurations
Scripting
result: stress YZ
Inputs
Outputs
Configurations
Scripting
result: modal basis
Inputs
Outputs
Configurations
Scripting
result: stress XZ
Inputs
Outputs
Configurations
Scripting
utility: merge string fields
Inputs
Outputs
Configurations
Scripting
result: stress principal 1
Inputs
Outputs
Configurations
Scripting
result: stress principal 2
Inputs
Outputs
Configurations
Scripting
invariant: convertnum bcs to nod
Inputs
Outputs
Configurations
Scripting
result: stress principal 3
Inputs
Outputs
Configurations
Scripting
result: stress von mises
Inputs
Outputs
Configurations
Scripting
result: gasket stress
Inputs
Outputs
Configurations
Scripting
result: gasket stress X
Inputs
Outputs
Configurations
Scripting
result: gasket stress XY
Inputs
Outputs
Configurations
Scripting
geo: elements facets surfaces over time
Inputs
Outputs
Configurations
Scripting
metadata: property field provider by property name
Inputs
Outputs
Configurations
Scripting
mesh: mesh provider
Inputs
Outputs
Configurations
Scripting
result: gasket inelastic closure
Inputs
Outputs
Configurations
Scripting
result: gasket inelastic closure X
Inputs
Outputs
Configurations
Scripting
result: write cms rbd file
Inputs
Outputs
Configurations
Scripting
serialization: export symbolic workflow
Inputs
Outputs
Configurations
Scripting
result: gasket inelastic closure XY
Inputs
Outputs
Configurations
Scripting
result: plastic strain eqv
Inputs
Outputs
Configurations
Scripting
mesh: beam properties
Inputs
Outputs
Configurations
Scripting
result: gasket inelastic closure XZ
Inputs
Outputs
Configurations
Scripting
result: gasket thermal closure
Inputs
Outputs
Configurations
Scripting
result: gasket thermal closure X
Inputs
Outputs
Configurations
Scripting
result: elastic strain X
Inputs
Outputs
Configurations
Scripting
result: elastic strain Y
Inputs
Outputs
Configurations
Scripting
result: elastic strain Z
Inputs
Outputs
Configurations
Scripting
math: min/max over time
Inputs
Outputs
Configurations
Scripting
utility: merge fields containers
Inputs
Outputs
Configurations
Scripting
result: global energy ratio without eroded energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: merge weighted fields containers
Inputs
Outputs
Configurations
Scripting
result: elastic strain XY
Inputs
Outputs
Configurations
Scripting
result: elastic strain YZ
Inputs
Outputs
Configurations
Scripting
invariant: eigen values (fields container)
Inputs
Outputs
Configurations
Scripting
result: elastic strain XZ
Inputs
Outputs
Configurations
Scripting
metadata: mesh property provider
Inputs
Outputs
Configurations
Scripting
result: elastic strain principal 1
Inputs
Outputs
Configurations
Scripting
result: elastic strain principal 2
Inputs
Outputs
Configurations
Scripting
utility: merge scopings
Inputs
Outputs
Configurations
Scripting
result: elastic strain principal 3
Inputs
Outputs
Configurations
Scripting
result: cyclic analytic disp max
Inputs
Outputs
Configurations
Scripting
result: elastic strain eqv
Inputs
Outputs
Configurations
Scripting
result: elastic strain intensity
Inputs
Outputs
Configurations
Scripting
result: elastic strain max_shear
Inputs
Outputs
Configurations
Scripting
result: turbulent dissipation rate (omega)
Inputs
Outputs
Configurations
Scripting
averaging: to elemental (fields container)
Inputs
Outputs
Configurations
Scripting
result: plastic strain
Inputs
Outputs
Configurations
Scripting
scoping: transpose
Inputs
Outputs
Configurations
Scripting
result: mass fraction
Inputs
Outputs
Configurations
Scripting
result: plastic strain X
Inputs
Outputs
Configurations
Scripting
result: coordinates (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: plastic strain Y
Inputs
Outputs
Configurations
Scripting
filter: band pass (fields container)
Inputs
Outputs
Configurations
Scripting
geo: to polar coordinates
Inputs
Outputs
Configurations
Scripting
math: fft evaluation
Inputs
Outputs
Configurations
Scripting
result: global total energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: plastic strain Z
Inputs
Outputs
Configurations
Scripting
utility: merge materials
Inputs
Outputs
Configurations
Scripting
serialization: vtk export
Inputs
Outputs
Configurations
Scripting
result: dynamic viscosity
Inputs
Outputs
Configurations
Scripting
result: plastic strain XY
Inputs
Outputs
Configurations
Scripting
result: hydrostatic pressure
Inputs
Outputs
Configurations
Scripting
result: compute stress von mises
Inputs
Outputs
Configurations
Scripting
filter: low pass (scoping)
Inputs
Outputs
Configurations
Scripting
mesh: iso surfaces
Inputs
Outputs
Configurations
Scripting
result: plastic strain YZ
Inputs
Outputs
Configurations
Scripting
result: plastic strain XZ
Inputs
Outputs
Configurations
Scripting
result: workflow energy per harmonic
Inputs
Outputs
Configurations
Scripting
result: plastic strain principal 3
Inputs
Outputs
Configurations
Scripting
result: plastic strain intensity
Inputs
Outputs
Configurations
Scripting
result: plastic strain max_shear
Inputs
Outputs
Configurations
Scripting
filter: low pass (timefreq)
Inputs
Outputs
Configurations
Scripting
filter: band pass (field)
Inputs
Outputs
Configurations
Scripting
math: modal damping ratio
Inputs
Outputs
Configurations
Scripting
result: thermal strain X
Inputs
Outputs
Configurations
Scripting
result: thermal strain Y
Inputs
Outputs
Configurations
Scripting
math: accumulate level over label
Inputs
Outputs
Configurations
Scripting
result: equivalent radiated power
Inputs
Outputs
Configurations
Scripting
result: thermal strain Z
Inputs
Outputs
Configurations
Scripting
result: thermal strain XY
Inputs
Outputs
Configurations
Scripting
math: accumulate over label
Inputs
Outputs
Configurations
Scripting
utility: merge scopings containers
Inputs
Outputs
Configurations
Scripting
result: thermal strain YZ
Inputs
Outputs
Configurations
Scripting
result: thermal strain principal 1
Inputs
Outputs
Configurations
Scripting
result: thermal strain principal 2
Inputs
Outputs
Configurations
Scripting
result: thermal strain principal 3
Inputs
Outputs
Configurations
Scripting
result: wall shear stress
Inputs
Outputs
Configurations
Scripting
result: velocity
Inputs
Outputs
Configurations
Scripting
result: reaction force
Inputs
Outputs
Configurations
Scripting
serialization: serializer
Inputs
Outputs
Configurations
Scripting
result: velocity X
Inputs
Outputs
Configurations
Scripting
result: reaction force X
Inputs
Outputs
Configurations
Scripting
geo: cartesian to spherical coordinates (fields container)
Inputs
Outputs
Configurations
Scripting
result: global external work (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: acceleration
Inputs
Outputs
Configurations
Scripting
result: element centroids
Inputs
Outputs
Configurations
Scripting
result: acceleration Z
Inputs
Outputs
Configurations
Scripting
scoping: rescope (fields container)
Inputs
Outputs
Configurations
Scripting
serialization: workflow to workflow_topology
Inputs
Outputs
Configurations
Scripting
result: displacement
Inputs
Outputs
Configurations
Scripting
result: displacement X
Inputs
Outputs
Configurations
Scripting
averaging: force_summation
Inputs
Outputs
Configurations
Scripting
result: displacement Y
Inputs
Outputs
Configurations
Scripting
result: displacement Z
Inputs
Outputs
Configurations
Scripting
result: heat flux X
Inputs
Outputs
Configurations
Scripting
result: heat flux Y
Inputs
Outputs
Configurations
Scripting
result: electric field
Inputs
Outputs
Configurations
Scripting
result: heat flux Z
Inputs
Outputs
Configurations
Scripting
serialization: serialize to hdf5
Inputs
Outputs
Configurations
Scripting
result: element orientations
Inputs
Outputs
Configurations
Scripting
result: element orientations Y
Inputs
Outputs
Configurations
Scripting
mesh: split mesh wrt property
Inputs
Outputs
Configurations
Scripting
result: element orientations Z
Inputs
Outputs
Configurations
Scripting
result: element nodal forces
Inputs
Outputs
Configurations
Scripting
result: compute total strain Z
Outputs
Configurations
Scripting
result: gasket inelastic closure
Inputs
Outputs
Configurations
Scripting
result: gasket inelastic closure X
Inputs
Outputs
Configurations
Scripting
result: write cms rbd file
Inputs
Outputs
Configurations
Scripting
serialization: export symbolic workflow
Inputs
Outputs
Configurations
Scripting
result: gasket inelastic closure XY
Inputs
Outputs
Configurations
Scripting
result: plastic strain eqv
Inputs
Outputs
Configurations
Scripting
mesh: beam properties
Inputs
Outputs
Configurations
Scripting
result: gasket inelastic closure XZ
Inputs
Outputs
Configurations
Scripting
result: gasket thermal closure
Inputs
Outputs
Configurations
Scripting
result: gasket thermal closure X
Inputs
Outputs
Configurations
Scripting
result: elastic strain X
Inputs
Outputs
Configurations
Scripting
result: elastic strain Y
Inputs
Outputs
Configurations
Scripting
result: elastic strain Z
Inputs
Outputs
Configurations
Scripting
math: min/max over time
Inputs
Outputs
Configurations
Scripting
utility: merge fields containers
Inputs
Outputs
Configurations
Scripting
result: global energy ratio without eroded energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: merge weighted fields containers
Inputs
Outputs
Configurations
Scripting
result: elastic strain XY
Inputs
Outputs
Configurations
Scripting
result: elastic strain YZ
Inputs
Outputs
Configurations
Scripting
invariant: eigen values (fields container)
Inputs
Outputs
Configurations
Scripting
result: elastic strain XZ
Inputs
Outputs
Configurations
Scripting
metadata: mesh property provider
Inputs
Outputs
Configurations
Scripting
result: elastic strain principal 1
Inputs
Outputs
Configurations
Scripting
result: elastic strain principal 2
Inputs
Outputs
Configurations
Scripting
utility: merge scopings
Inputs
Outputs
Configurations
Scripting
result: elastic strain principal 3
Inputs
Outputs
Configurations
Scripting
result: cyclic analytic disp max
Inputs
Outputs
Configurations
Scripting
result: elastic strain eqv
Inputs
Outputs
Configurations
Scripting
result: electric flux density X
Inputs
Outputs
Configurations
Scripting
result: elastic strain intensity
Inputs
Outputs
Configurations
Scripting
result: elastic strain max_shear
Inputs
Outputs
Configurations
Scripting
result: turbulent dissipation rate (omega)
Inputs
Outputs
Configurations
Scripting
averaging: to elemental (fields container)
Inputs
Outputs
Configurations
Scripting
result: plastic strain
Inputs
Outputs
Configurations
Scripting
scoping: transpose
Inputs
Outputs
Configurations
Scripting
result: mass fraction
Inputs
Outputs
Configurations
Scripting
result: plastic strain X
Inputs
Outputs
Configurations
Scripting
result: coordinates (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: plastic strain Y
Inputs
Outputs
Configurations
Scripting
filter: band pass (fields container)
Inputs
Outputs
Configurations
Scripting
geo: to polar coordinates
Inputs
Outputs
Configurations
Scripting
math: fft evaluation
Inputs
Outputs
Configurations
Scripting
result: global total energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: plastic strain Z
Inputs
Outputs
Configurations
Scripting
utility: merge materials
Inputs
Outputs
Configurations
Scripting
serialization: vtk export
Inputs
Outputs
Configurations
Scripting
result: dynamic viscosity
Inputs
Outputs
Configurations
Scripting
result: plastic strain XY
Inputs
Outputs
Configurations
Scripting
result: hydrostatic pressure
Inputs
Outputs
Configurations
Scripting
result: compute stress von mises
Inputs
Outputs
Configurations
Scripting
filter: low pass (scoping)
Inputs
Outputs
Configurations
Scripting
mesh: iso surfaces
Inputs
Outputs
Configurations
Scripting
result: plastic strain YZ
Inputs
Outputs
Configurations
Scripting
result: plastic strain XZ
Inputs
Outputs
Configurations
Scripting
result: workflow energy per harmonic
Inputs
Outputs
Configurations
Scripting
result: plastic strain principal 3
Inputs
Outputs
Configurations
Scripting
result: plastic strain intensity
Inputs
Outputs
Configurations
Scripting
result: plastic strain max_shear
Inputs
Outputs
Configurations
Scripting
filter: low pass (timefreq)
Inputs
Outputs
Configurations
Scripting
filter: band pass (field)
Inputs
Outputs
Configurations
Scripting
math: modal damping ratio
Inputs
Outputs
Configurations
Scripting
result: thermal strain X
Inputs
Outputs
Configurations
Scripting
result: thermal strain Y
Inputs
Outputs
Configurations
Scripting
result: equivalent radiated power
Inputs
Outputs
Configurations
Scripting
result: thermal strain Z
Inputs
Outputs
Configurations
Scripting
utility: merge scopings containers
Inputs
Outputs
Configurations
Scripting
math: accumulate over label
Inputs
Outputs
Configurations
Scripting
result: thermal strain XY
Inputs
Outputs
Configurations
Scripting
result: thermal strain YZ
Inputs
Outputs
Configurations
Scripting
result: thermal strain principal 1
Inputs
Outputs
Configurations
Scripting
result: thermal strain principal 2
Inputs
Outputs
Configurations
Scripting
result: thermal strain principal 3
Inputs
Outputs
Configurations
Scripting
serialization: serializer
Inputs
Outputs
Configurations
Scripting
result: wall shear stress
Inputs
Outputs
Configurations
Scripting
result: velocity
Inputs
Outputs
Configurations
Scripting
result: reaction force
Inputs
Outputs
Configurations
Scripting
result: velocity X
Inputs
Outputs
Configurations
Scripting
result: reaction force X
Inputs
Outputs
Configurations
Scripting
geo: cartesian to spherical coordinates (fields container)
Inputs
Outputs
Configurations
Scripting
result: global external work (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: acceleration
Inputs
Outputs
Configurations
Scripting
result: element centroids
Inputs
Outputs
Configurations
Scripting
result: acceleration Z
Inputs
Outputs
Configurations
Scripting
scoping: rescope (fields container)
Inputs
Outputs
Configurations
Scripting
serialization: workflow to workflow_topology
Inputs
Outputs
Configurations
Scripting
result: displacement
Inputs
Outputs
Configurations
Scripting
result: electric field Z
Inputs
Outputs
Configurations
Scripting
result: displacement X
Inputs
Outputs
Configurations
Scripting
averaging: force_summation
Inputs
Outputs
Configurations
Scripting
result: displacement Y
Inputs
Outputs
Configurations
Scripting
result: electric field X
Inputs
Outputs
Configurations
Scripting
result: displacement Z
Inputs
Outputs
Configurations
Scripting
result: heat flux X
Inputs
Outputs
Configurations
Scripting
result: heat flux Y
Inputs
Outputs
Configurations
Scripting
result: electric field
Inputs
Outputs
Configurations
Scripting
result: heat flux Z
Inputs
Outputs
Configurations
Scripting
serialization: serialize to hdf5
Inputs
Outputs
Configurations
Scripting
result: element orientations
Inputs
Outputs
Configurations
Scripting
result: element orientations Y
Inputs
Outputs
Configurations
Scripting
result: magnetic field Z
Inputs
Outputs
Configurations
Scripting
averaging: elemental mean (fields container)
Inputs
Outputs
Configurations
Scripting
result: magnetic field Y
Inputs
Outputs
Configurations
Scripting
mesh: split mesh wrt property
Inputs
Outputs
Configurations
Scripting
result: element orientations Z
Inputs
Outputs
Configurations
Scripting
result: temperature gradient
Inputs
Outputs
Configurations
Scripting
result: cgns result provider
Inputs
Outputs
Configurations
Scripting
result: temperature gradient X
Inputs
Outputs
Configurations
Scripting
result: entropy
Inputs
Outputs
Configurations
Scripting
result: volume fraction
Inputs
Outputs
Configurations
Scripting
result: temperature gradient Y
Inputs
Outputs
Configurations
Scripting
result: temperature gradient Z
Inputs
Outputs
Configurations
Scripting
result: elemental summable miscellaneous data
Inputs
Outputs
Configurations
Scripting
result: magnetic field
Inputs
Outputs
Configurations
Scripting
result: compute stress Z
Inputs
Outputs
Configurations
Scripting
result: part eroded kinetic energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: magnetic field X
Inputs
Outputs
Configurations
Scripting
serialization: string deserializer
Inputs
Outputs
Configurations
Scripting
utility: merge meshes containers
Inputs
Outputs
Configurations
Scripting
result: magnetic flux density
Inputs
Outputs
Configurations
Scripting
result: magnetic flux density X
Inputs
Outputs
Configurations
Scripting
result: magnetic flux density Y
Inputs
Outputs
Configurations
Scripting
result: nodal force
Inputs
Outputs
Configurations
Scripting
result: magnetic flux density Z
Inputs
Outputs
Configurations
Scripting
result: electric field Y
Inputs
Outputs
Configurations
Scripting
result: electric flux density Z
Inputs
Outputs
Configurations
Scripting
result: element nodal forces
Inputs
Outputs
Configurations
Scripting
result: compute total strain Z
Configurating operators
Only linear analysis are supported without On Demand Expansion.
All coordinates are global coordinates.
Euler Angles need to be included in the database.
- Get the ZZ normal component (22 component).">Inputs
Outputs
Configurations
Scripting
result: structural temperature
Inputs
Outputs
Configurations
Scripting
metadata: result info provider
Inputs
Outputs
Configurations
Scripting
mesh: skin (tri mesh)
Inputs
Outputs
Configurations
Scripting
result: stress ratio
Inputs
Outputs
Configurations
Scripting
filter: band pass (timescoping)
Inputs
Outputs
Configurations
Scripting
compression: sketch matrix
Inputs
Outputs
Configurations
Scripting
result: accu eqv plastic strain
Inputs
Outputs
Configurations
Scripting
result: plastic state variable
Inputs
Outputs
Configurations
Scripting
math: average over label
Inputs
Outputs
Configurations
Scripting
result: accu eqv creep strain
Inputs
Outputs
Configurations
Scripting
result: plastic strain energy density
Inputs
Outputs
Configurations
Scripting
result: material property of element
Inputs
Outputs
Configurations
Scripting
result: creep strain energy density
Inputs
Outputs
Configurations
Scripting
result: erp radiation efficiency
Inputs
Outputs
Configurations
Scripting
result: elastic strain energy density
Inputs
Outputs
Configurations
Scripting
serialization: field to csv
Inputs
Outputs
Configurations
Scripting
utility: merge generic data container
Inputs
Outputs
Configurations
Scripting
result: global joint internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded element nodal forces
Inputs
Outputs
Configurations
Scripting
serialization: vtk to fields
Inputs
Outputs
Configurations
Scripting
utility: merge any objects
Inputs
Outputs
Configurations
Scripting
result: elemental volume
Inputs
Outputs
Configurations
Scripting
result: artificial hourglass energy
Inputs
Outputs
Configurations
Scripting
result: kinetic energy
Inputs
Outputs
Configurations
Scripting
result: thermal dissipation energy
Inputs
Outputs
Configurations
Scripting
result: nodal force
Inputs
Outputs
Configurations
Scripting
result: total mass
Inputs
Outputs
Configurations
Scripting
result: rms static pressure
Inputs
Outputs
Configurations
Scripting
result: swelling strains
Inputs
Outputs
Configurations
Scripting
result: temperature
Inputs
Outputs
Configurations
Scripting
result: compute stress
Inputs
Outputs
Configurations
Scripting
result: raw displacement
Inputs
Outputs
Configurations
Scripting
result: raw reaction force
Inputs
Outputs
Configurations
Scripting
result: turbulent kinetic energy (k)
Inputs
Outputs
Configurations
Scripting
result: electric potential
Inputs
Outputs
Configurations
Scripting
result: thickness
Inputs
Outputs
Configurations
Scripting
result: mapdl run
Inputs
Outputs
Configurations
Scripting
result: equivalent mass
Inputs
Outputs
Configurations
Scripting
result: custom result
Inputs
Outputs
Configurations
Scripting
result: elemental heat generation
Inputs
Outputs
Configurations
Scripting
result: temperature gradient
Inputs
Outputs
Configurations
Scripting
result: elemental summable miscellaneous data
Inputs
Outputs
Configurations
Scripting
result: magnetic field
Inputs
Outputs
Configurations
Scripting
utility: merge meshes containers
Inputs
Outputs
Configurations
Scripting
result: magnetic flux density
Inputs
Outputs
Configurations
Scripting
result: magnetic vector potential
Inputs
Outputs
Configurations
Scripting
result: joint force reaction
Inputs
Outputs
Configurations
Scripting
result: joint moment reaction
Inputs
Outputs
Configurations
Scripting
result: beam T shear force (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: joint relative displacement
Inputs
Outputs
Configurations
Scripting
result: joint relative rotation
Inputs
Outputs
Configurations
Scripting
result: joint relative velocity
Inputs
Outputs
Configurations
Scripting
result: joint relative acceleration
Inputs
Outputs
Configurations
Scripting
result: joint relative angular acceleration
Inputs
Outputs
Configurations
Scripting
result: global internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
serialization: txt to data tree
Inputs
Outputs
Configurations
Scripting
result: thermal strains eqv
Inputs
Outputs
Configurations
Scripting
result: elemental non summable miscellaneous data
Inputs
Outputs
Configurations
Scripting
utility: merge supports
Inputs
Outputs
Configurations
Scripting
result: global kinetic energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: matrix inverse
Inputs
Outputs
Configurations
Scripting
result: global time step (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global rigid body stopper energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
geo: cartesian to spherical coordinates
Inputs
Outputs
Configurations
Scripting
result: global spring and damper energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: beam T bending moment (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global hourglass energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global system damping energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global eroded kinetic energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global energy ratio (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global added mass (LSDyna)
Inputs
Outputs
Configurations
Scripting
mapping: on reduced coordinates
Inputs
Outputs
Configurations
Scripting
invariant: principal invariants (fields container)
Inputs
Outputs
Configurations
Scripting
result: global added mass (percentage) (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global center of mass (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: beam S shear force (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: beam S bending moment (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: beam RS shear stress (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: merge fields into field matrix
Inputs
Outputs
Configurations
Scripting
result: beam axial plastic strain (LSDyna)
Inputs
Outputs
Configurations
Scripting
invariant: von mises eqv (field)
Inputs
Outputs
Configurations
Scripting
invariant: segalman von mises eqv (field)
Inputs
Outputs
Configurations
Scripting
result: part eroded internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: part kinetic energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
scoping: on mesh property
Inputs
Outputs
Configurations
Scripting
serialization: string deserializer
Inputs
Outputs
Configurations
Scripting
result: compute stress Z
Inputs
Outputs
Configurations
Scripting
result: part eroded kinetic energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
scoping: from mesh
Inputs
Outputs
Configurations
Scripting
result: part added mass (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: part hourglass energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: part rigid body velocity (LSDyna)
Inputs
Outputs
Configurations
Scripting
min_max: time of max
Inputs
Outputs
Configurations
Scripting
scoping: nodes in mesh
Inputs
Outputs
Configurations
Scripting
metadata: cyclic support provider
Inputs
Outputs
Configurations
Scripting
result: density
Inputs
Outputs
Configurations
Scripting
result: spectrum data
Inputs
Outputs
Configurations
Scripting
averaging: elemental to elemental nodal (fields container)
Inputs
Outputs
Configurations
Scripting
result: total pressure
Inputs
Outputs
Configurations
Scripting
result: mean velocity
Inputs
Outputs
Configurations
Scripting
result: Lighthill tensor divergence
Inputs
Outputs
Configurations
Scripting
result: entropy
Inputs
Outputs
Configurations
Scripting
result: volume fraction
Inputs
Outputs
Configurations
Scripting
result: mass flow rate
Inputs
Outputs
Configurations
Scripting
result: mach number
Inputs
Outputs
Configurations
Scripting
result: rms temperature
Inputs
Outputs
Configurations
Scripting
scoping: scoping get attribute
Inputs
Outputs
Configurations
Scripting
result: read cms_rbd file
Inputs
Outputs
Configurations
Scripting
result: mean temperature
Inputs
Outputs
Configurations
Scripting
min_max: over fields container
Inputs
Outputs
Configurations
Scripting
result: surface heat rate
Inputs
Outputs
Configurations
Scripting
result: thermal conductivity
Inputs
Outputs
Configurations
Scripting
result: specific heat
Inputs
Outputs
Configurations
Scripting
result: turbulent dissipation rate (epsilon)
Inputs
Outputs
Configurations
Scripting
metadata: time freq provider
Inputs
Outputs
Configurations
Scripting
metadata: mesh info provider
Inputs
Outputs
Configurations
Scripting
result: von mises stresses as mechanical
Inputs
Outputs
Configurations
Scripting
metadata: streams provider
Inputs
Outputs
Configurations
Scripting
result: poynting vector surface
Inputs
Outputs
Configurations
Scripting
metadata: datasources provider
Inputs
Outputs
Configurations
Scripting
scoping: rescope
Inputs
Outputs
Configurations
Scripting
filter: low pass (timescoping)
Inputs
Outputs
Configurations
Scripting
utility: data sources get attribute
Inputs
Outputs
Configurations
Scripting
mesh: meshes provider
Inputs
Outputs
Configurations
Scripting
metadata: mesh selection manager provider
Inputs
Outputs
Configurations
Scripting
utility: for each
Inputs
Outputs
Configurations
Scripting
metadata: boundary condition provider
Inputs
Outputs
Configurations
Scripting
utility: merge property fields
Inputs
Outputs
Configurations
Scripting
metadata: cyclic analysis?
Inputs
Outputs
Configurations
Scripting
metadata: material support provider
Inputs
Outputs
Configurations
Scripting
scoping: on named selection
Inputs
Outputs
Configurations
Scripting
scoping: reduce sampling scoping
Inputs
Outputs
Configurations
Scripting
math: accumulation per scoping
Inputs
Outputs
Configurations
Scripting
result: coordinate system
Inputs
Outputs
Configurations
Scripting
logic: splitter::streams
Inputs
Outputs
Configurations
Scripting
serialization: hdf5dpf custom read
Inputs
Outputs
Configurations
Scripting
result: nodal_to_global
Inputs
Outputs
Configurations
Scripting
scoping: rescope custom type field
Inputs
Outputs
Configurations
Scripting
result: global_to_nodal
Inputs
Outputs
Configurations
Scripting
min_max: min max by entity over time
Inputs
Outputs
Configurations
Scripting
min_max: max over time
Inputs
Outputs
Configurations
Scripting
scoping: connectivity ids
Inputs
Outputs
Configurations
Scripting
utility: remote workflow instantiate
Inputs
Outputs
Configurations
Scripting
utility: remote operator instantiate
Inputs
Outputs
Configurations
Scripting
math: compute residual and error
Inputs
Outputs
Configurations
Scripting
result: structural temperature
Inputs
Outputs
Configurations
Scripting
metadata: result info provider
Inputs
Outputs
Configurations
Scripting
mesh: skin (tri mesh)
Inputs
Outputs
Configurations
Scripting
result: stress ratio
Inputs
Outputs
Configurations
Scripting
filter: band pass (timescoping)
Inputs
Outputs
Configurations
Scripting
compression: sketch matrix
Inputs
Outputs
Configurations
Scripting
result: accu eqv plastic strain
Inputs
Outputs
Configurations
Scripting
result: plastic state variable
Inputs
Outputs
Configurations
Scripting
result: accu eqv creep strain
Inputs
Outputs
Configurations
Scripting
result: plastic strain energy density
Inputs
Outputs
Configurations
Scripting
result: material property of element
Inputs
Outputs
Configurations
Scripting
result: creep strain energy density
Inputs
Outputs
Configurations
Scripting
result: erp radiation efficiency
Inputs
Outputs
Configurations
Scripting
result: elastic strain energy density
Inputs
Outputs
Configurations
Scripting
serialization: field to csv
Inputs
Outputs
Configurations
Scripting
utility: merge generic data container
Inputs
Outputs
Configurations
Scripting
result: global joint internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded element nodal forces
Inputs
Outputs
Configurations
Scripting
serialization: vtk to fields
Inputs
Outputs
Configurations
Scripting
utility: merge any objects
Inputs
Outputs
Configurations
Scripting
result: elemental volume
Inputs
Outputs
Configurations
Scripting
result: artificial hourglass energy
Inputs
Outputs
Configurations
Scripting
result: kinetic energy
Inputs
Outputs
Configurations
Scripting
result: thermal dissipation energy
Inputs
Outputs
Configurations
Scripting
result: total mass
Inputs
Outputs
Configurations
Scripting
result: rms static pressure
Inputs
Outputs
Configurations
Scripting
result: swelling strains
Inputs
Outputs
Configurations
Scripting
result: temperature
Inputs
Outputs
Configurations
Scripting
result: compute stress
Inputs
Outputs
Configurations
Scripting
result: raw displacement
Inputs
Outputs
Configurations
Scripting
result: raw reaction force
Inputs
Outputs
Configurations
Scripting
result: turbulent kinetic energy (k)
Inputs
Outputs
Configurations
Scripting
result: electric potential
Inputs
Outputs
Configurations
Scripting
result: thickness
Inputs
Outputs
Configurations
Scripting
result: mapdl run
Inputs
Outputs
Configurations
Scripting
result: equivalent mass
Inputs
Outputs
Configurations
Scripting
result: custom result
Inputs
Outputs
Configurations
Scripting
result: elemental heat generation
Inputs
Outputs
Configurations
Scripting
result: magnetic vector potential
Inputs
Outputs
Configurations
Scripting
result: joint force reaction
Inputs
Outputs
Configurations
Scripting
result: joint moment reaction
Inputs
Outputs
Configurations
Scripting
result: beam T shear force (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: joint relative displacement
Inputs
Outputs
Configurations
Scripting
result: joint relative rotation
Inputs
Outputs
Configurations
Scripting
result: joint relative velocity
Inputs
Outputs
Configurations
Scripting
result: joint relative acceleration
Inputs
Outputs
Configurations
Scripting
result: joint relative angular acceleration
Inputs
Outputs
Configurations
Scripting
result: global internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
serialization: txt to data tree
Inputs
Outputs
Configurations
Scripting
result: thermal strains eqv
Inputs
Outputs
Configurations
Scripting
result: elemental non summable miscellaneous data
Inputs
Outputs
Configurations
Scripting
utility: merge supports
Inputs
Outputs
Configurations
Scripting
result: global kinetic energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
math: matrix inverse
Inputs
Outputs
Configurations
Scripting
result: global time step (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global rigid body stopper energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
geo: cartesian to spherical coordinates
Inputs
Outputs
Configurations
Scripting
result: global spring and damper energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: beam T bending moment (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global hourglass energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global system damping energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global eroded kinetic energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global energy ratio (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global added mass (LSDyna)
Inputs
Outputs
Configurations
Scripting
mapping: on reduced coordinates
Inputs
Outputs
Configurations
Scripting
invariant: principal invariants (fields container)
Inputs
Outputs
Configurations
Scripting
result: global added mass (percentage) (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: global center of mass (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: beam S shear force (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: beam S bending moment (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: beam RS shear stress (LSDyna)
Inputs
Outputs
Configurations
Scripting
utility: merge fields into field matrix
Inputs
Outputs
Configurations
Scripting
result: beam axial plastic strain (LSDyna)
Inputs
Outputs
Configurations
Scripting
invariant: von mises eqv (field)
Inputs
Outputs
Configurations
Scripting
invariant: segalman von mises eqv (field)
Inputs
Outputs
Configurations
Scripting
result: part eroded internal energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: part kinetic energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
scoping: on mesh property
Inputs
Outputs
Configurations
Scripting
scoping: from mesh
Inputs
Outputs
Configurations
Scripting
result: part added mass (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: part hourglass energy (LSDyna)
Inputs
Outputs
Configurations
Scripting
result: part rigid body velocity (LSDyna)
Inputs
Outputs
Configurations
Scripting
min_max: time of max
Inputs
Outputs
Configurations
Scripting
scoping: nodes in mesh
Inputs
Outputs
Configurations
Scripting
metadata: cyclic support provider
Inputs
Outputs
Configurations
Scripting
result: density
Inputs
Outputs
Configurations
Scripting
result: spectrum data
Inputs
Outputs
Configurations
Scripting
averaging: elemental to elemental nodal (fields container)
Inputs
Outputs
Configurations
Scripting
result: total pressure
Inputs
Outputs
Configurations
Scripting
result: mean velocity
Inputs
Outputs
Configurations
Scripting
result: Lighthill tensor divergence
Inputs
Outputs
Configurations
Scripting
result: mass flow rate
Inputs
Outputs
Configurations
Scripting
result: mach number
Inputs
Outputs
Configurations
Scripting
result: rms temperature
Inputs
Outputs
Configurations
Scripting
scoping: scoping get attribute
Inputs
Outputs
Configurations
Scripting
result: read cms_rbd file
Inputs
Outputs
Configurations
Scripting
result: mean temperature
Inputs
Outputs
Configurations
Scripting
result: surface heat rate
Inputs
Outputs
Configurations
Scripting
result: thermal conductivity
Inputs
Outputs
Configurations
Scripting
result: specific heat
Inputs
Outputs
Configurations
Scripting
result: turbulent dissipation rate (epsilon)
Inputs
Outputs
Configurations
Scripting
metadata: time freq provider
Inputs
Outputs
Configurations
Scripting
metadata: mesh info provider
Inputs
Outputs
Configurations
Scripting
result: von mises stresses as mechanical
Inputs
Outputs
Configurations
Scripting
metadata: streams provider
Inputs
Outputs
Configurations
Scripting
result: poynting vector surface
Inputs
Outputs
Configurations
Scripting
metadata: datasources provider
Inputs
Outputs
Configurations
Scripting
scoping: rescope
Inputs
Outputs
Configurations
Scripting
filter: low pass (timescoping)
Inputs
Outputs
Configurations
Scripting
utility: data sources get attribute
Inputs
Outputs
Configurations
Scripting
mesh: meshes provider
Inputs
Outputs
Configurations
Scripting
metadata: mesh selection manager provider
Inputs
Outputs
Configurations
Scripting
utility: for each
Inputs
Outputs
Configurations
Scripting
metadata: boundary condition provider
Inputs
Outputs
Configurations
Scripting
utility: merge property fields
Inputs
Outputs
Configurations
Scripting
metadata: cyclic analysis?
Inputs
Outputs
Configurations
Scripting
metadata: material support provider
Inputs
Outputs
Configurations
Scripting
scoping: on named selection
Inputs
Outputs
Configurations
Scripting
scoping: reduce sampling scoping
Inputs
Outputs
Configurations
Scripting
math: accumulation per scoping
Inputs
Outputs
Configurations
Scripting
metadata: real constants provider
Inputs
Outputs
Configurations
Scripting
metadata: coordinate system data provider
Inputs
Outputs
Configurations
Scripting
result: coordinate system
Inputs
Outputs
Configurations
Scripting
logic: splitter::streams
Inputs
Outputs
Configurations
Scripting
serialization: hdf5dpf custom read
Inputs
Outputs
Configurations
Scripting
result: nodal_to_global
Inputs
Outputs
Configurations
Scripting
scoping: rescope custom type field
Inputs
Outputs
Configurations
Scripting
result: global_to_nodal
Inputs
Outputs
Configurations
Scripting
min_max: min max by entity over time
Inputs
Outputs
Configurations
Scripting
min_max: max over time
Inputs
Outputs
Configurations
Scripting
scoping: connectivity ids
Inputs
Outputs
Configurations
Scripting
utility: remote workflow instantiate
Inputs
Outputs
Configurations
Scripting
utility: remote operator instantiate
Inputs
Outputs
Configurations
Scripting
math: compute residual and error
Configurating operators
2 for normalized by the max at a given time step of the first entry or residuals depending on the reference field option,
3 for normalized by the max over all time steps of the first entry or residuals depending on the reference field option" types="int32" optional="true"ellipsis="false"inplace="false">Outputs
Configurations
Scripting
result: add rigid body motion (fields container)
Inputs
Outputs
Configurations
Scripting
utility: merge time freq supports
Inputs
Outputs
Configurations
Scripting
min_max: incremental over fields container
Inputs
Outputs
Configurations
Scripting
scoping: split on property type
Inputs
Outputs
Configurations
Scripting
utility: overlap fields
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal elemental (field)
Inputs
Outputs
Configurations
Scripting
scoping: adapt with scopings container
Inputs
Outputs
Configurations
Scripting
utility: change shell layers
Inputs
Outputs
Configurations
Scripting
utility: merge meshes
Inputs
Outputs
Configurations
Scripting
utility: merge fields
Inputs
Outputs
Configurations
Scripting
utility: merge weighted fields
Inputs
Outputs
Configurations
Scripting
utility: merge fc to fc field matrices
Inputs
Outputs
Configurations
Scripting
filter: high pass (field)
Inputs
Outputs
Configurations
Scripting
min_max: max by component
Inputs
Outputs
Configurations
Scripting
utility: weighted merge fields by label
Inputs
Outputs
Configurations
Scripting
utility: merge fields by label
Inputs
Outputs
Configurations
Scripting
averaging: elemental to elemental nodal (field)
Inputs
Outputs
Configurations
Scripting
min_max: min max by entity
Inputs
Outputs
Configurations
Scripting
utility: merge collections
Inputs
Outputs
Configurations
Scripting
logic: merge solid and shell fields
Inputs
Outputs
Configurations
Scripting
min_max: min over time
Inputs
Outputs
Configurations
Scripting
geo: element nodal contribution
Inputs
Outputs
Configurations
Scripting
min_max: over label
Inputs
Outputs
Configurations
Scripting
min_max: min by component
Inputs
Outputs
Configurations
Scripting
serialization: serializer to string
Inputs
Outputs
Configurations
Scripting
serialization: deserializer
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded velocity
Inputs
Outputs
Configurations
Scripting
mesh: skin
Inputs
Outputs
Configurations
Scripting
utility: split in for each range
Inputs
Outputs
Configurations
Scripting
utility: make for each range
Inputs
Outputs
Configurations
Scripting
utility: incremental field
Inputs
Outputs
Configurations
Scripting
utility: incremental fields container
Inputs
Outputs
Configurations
Scripting
geo: rotate (fields container)
Inputs
Outputs
Configurations
Scripting
utility: incremental property field
Inputs
Outputs
Configurations
Scripting
utility: incremental mesh
Inputs
Outputs
Configurations
Scripting
mesh: points from coordinates
Inputs
Outputs
Configurations
Scripting
utility: incremental concantenate as fields container.
Inputs
Outputs
Configurations
Scripting
utility: make producer consumer for each iterator
Inputs
Outputs
Configurations
Scripting
utility: producer consumer for each
Inputs
Outputs
Configurations
Scripting
averaging: extend to mid nodes (field)
Inputs
Outputs
Configurations
Scripting
invariant: eigen vectors (on fields container)
Inputs
Outputs
Configurations
Scripting
mesh: mesh get attribute
Inputs
Outputs
Configurations
Scripting
metadata: time freq support get attribute
Inputs
Outputs
Configurations
Scripting
utility: set attribute
Inputs
Outputs
Configurations
Scripting
utility: field get attribute
Inputs
Outputs
Configurations
Scripting
min_max: time of min
Inputs
Outputs
Configurations
Scripting
min_max: max over phase
Inputs
Outputs
Configurations
Scripting
min_max: phase of max
Inputs
Outputs
Configurations
Scripting
utility: voigt to standard strains
Inputs
Outputs
Configurations
Scripting
utility: voigt to standard strains (fields container)
Inputs
Outputs
Configurations
Scripting
min_max: incremental over field
Inputs
Outputs
Configurations
Scripting
utility: workflow to pydpf generator
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (timefreq)
Inputs
Outputs
Configurations
Scripting
logic: same string fields?
Inputs
Outputs
Configurations
Scripting
logic: same meshes?
Inputs
Outputs
Configurations
Scripting
logic: same fields?
Inputs
Outputs
Configurations
Scripting
logic: fields included?
Inputs
Outputs
Configurations
Scripting
logic: same fields container?
Inputs
Outputs
Configurations
Scripting
filter: high pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: high pass (timescoping)
Inputs
Outputs
Configurations
Scripting
filter: high pass (fields container)
Inputs
Outputs
Configurations
Scripting
filter: low pass (field)
Inputs
Outputs
Configurations
Scripting
filter: low pass (fields container)
Inputs
Outputs
Configurations
Scripting
filter: band pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: band pass (timefreq)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (timescoping)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (fields container)
Inputs
Outputs
Configurations
Scripting
serialization: csv to field
Inputs
Outputs
Configurations
Scripting
result: members in compression not certified
Inputs
Outputs
Configurations
Scripting
result: members in bending not certified
Inputs
Outputs
Configurations
Scripting
result: members in linear compression bending not certified
Inputs
Outputs
Configurations
Scripting
invariant: convertnum nod to bcs
Inputs
Outputs
Configurations
Scripting
geo: rotate
Inputs
Outputs
Configurations
Scripting
logic: enrich materials
Inputs
Outputs
Configurations
Scripting
serialization: data tree to json
Inputs
Outputs
Configurations
Scripting
serialization: data tree to txt
Inputs
Outputs
Configurations
Scripting
serialization: json to data tree
Inputs
Outputs
Configurations
Scripting
averaging: nodal difference (fields container)
Inputs
Outputs
Configurations
Scripting
logic: descending sort
Inputs
Outputs
Configurations
Scripting
logic: ascending sort (fields container)
Inputs
Outputs
Configurations
Scripting
logic: descending sort (fields container)
Inputs
Outputs
Configurations
Scripting
serialization: import symbolic workflow
Inputs
Outputs
Configurations
Scripting
filter: filtering max over time workflow
Inputs
Outputs
Configurations
Scripting
metadata: integrate over time freq
Inputs
Outputs
Configurations
Scripting
averaging: nodal difference (field)
Inputs
Outputs
Configurations
Scripting
result: compute stress YZ
Inputs
Outputs
Configurations
Scripting
logic: splitter::data_sources
Inputs
Outputs
Configurations
Scripting
averaging: to elemental nodal (fields container)
Inputs
Outputs
Configurations
Scripting
server: grpc start server
Inputs
Outputs
Configurations
Scripting
result: compute stress XY
Inputs
Outputs
Configurations
Scripting
utility: operator id
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal (field)
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: elemental to nodal (field)
Outputs
Configurations
Scripting
result: add rigid body motion (fields container)
Inputs
Outputs
Configurations
Scripting
utility: merge time freq supports
Inputs
Outputs
Configurations
Scripting
min_max: incremental over fields container
Inputs
Outputs
Configurations
Scripting
scoping: split on property type
Inputs
Outputs
Configurations
Scripting
utility: overlap fields
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal elemental (field)
Inputs
Outputs
Configurations
Scripting
scoping: adapt with scopings container
Inputs
Outputs
Configurations
Scripting
utility: change shell layers
Inputs
Outputs
Configurations
Scripting
utility: merge meshes
Inputs
Outputs
Configurations
Scripting
utility: merge fields
Inputs
Outputs
Configurations
Scripting
utility: merge weighted fields
Inputs
Outputs
Configurations
Scripting
utility: merge fc to fc field matrices
Inputs
Outputs
Configurations
Scripting
filter: high pass (field)
Inputs
Outputs
Configurations
Scripting
utility: weighted merge fields by label
Inputs
Outputs
Configurations
Scripting
min_max: max by component
Inputs
Outputs
Configurations
Scripting
utility: merge fields by label
Inputs
Outputs
Configurations
Scripting
averaging: elemental to elemental nodal (field)
Inputs
Outputs
Configurations
Scripting
min_max: min max by entity
Inputs
Outputs
Configurations
Scripting
utility: merge collections
Inputs
Outputs
Configurations
Scripting
logic: merge solid and shell fields
Inputs
Outputs
Configurations
Scripting
min_max: min over time
Inputs
Outputs
Configurations
Scripting
geo: element nodal contribution
Inputs
Outputs
Configurations
Scripting
min_max: over field
Inputs
Outputs
Configurations
Scripting
result: transient rayleigh integration
Inputs
Outputs
Configurations
Scripting
min_max: over fields container
Inputs
Outputs
Configurations
Scripting
min_max: over label
Inputs
Outputs
Configurations
Scripting
min_max: min by component
Inputs
Outputs
Configurations
Scripting
math: average over label
Inputs
Outputs
Configurations
Scripting
math: accumulate level over label
Inputs
Outputs
Configurations
Scripting
serialization: serializer to string
Inputs
Outputs
Configurations
Scripting
serialization: deserializer
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded velocity
Inputs
Outputs
Configurations
Scripting
mesh: skin
Inputs
Outputs
Configurations
Scripting
utility: split in for each range
Inputs
Outputs
Configurations
Scripting
utility: make for each range
Inputs
Outputs
Configurations
Scripting
utility: incremental field
Inputs
Outputs
Configurations
Scripting
utility: incremental fields container
Inputs
Outputs
Configurations
Scripting
geo: rotate (fields container)
Inputs
Outputs
Configurations
Scripting
utility: incremental property field
Inputs
Outputs
Configurations
Scripting
utility: incremental mesh
Inputs
Outputs
Configurations
Scripting
mesh: points from coordinates
Inputs
Outputs
Configurations
Scripting
utility: incremental concantenate as fields container.
Inputs
Outputs
Configurations
Scripting
utility: make producer consumer for each iterator
Inputs
Outputs
Configurations
Scripting
utility: producer consumer for each
Inputs
Outputs
Configurations
Scripting
averaging: extend to mid nodes (field)
Inputs
Outputs
Configurations
Scripting
invariant: eigen vectors (on fields container)
Inputs
Outputs
Configurations
Scripting
mesh: mesh get attribute
Inputs
Outputs
Configurations
Scripting
metadata: time freq support get attribute
Inputs
Outputs
Configurations
Scripting
utility: set attribute
Inputs
Outputs
Configurations
Scripting
utility: field get attribute
Inputs
Outputs
Configurations
Scripting
min_max: time of min
Inputs
Outputs
Configurations
Scripting
min_max: max over phase
Inputs
Outputs
Configurations
Scripting
min_max: phase of max
Inputs
Outputs
Configurations
Scripting
utility: voigt to standard strains
Inputs
Outputs
Configurations
Scripting
utility: voigt to standard strains (fields container)
Inputs
Outputs
Configurations
Scripting
min_max: incremental over field
Inputs
Outputs
Configurations
Scripting
utility: workflow to pydpf generator
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (timefreq)
Inputs
Outputs
Configurations
Scripting
logic: same string fields?
Inputs
Outputs
Configurations
Scripting
logic: same meshes?
Inputs
Outputs
Configurations
Scripting
logic: same fields?
Inputs
Outputs
Configurations
Scripting
logic: fields included?
Inputs
Outputs
Configurations
Scripting
logic: same fields container?
Inputs
Outputs
Configurations
Scripting
filter: high pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: high pass (timescoping)
Inputs
Outputs
Configurations
Scripting
filter: high pass (fields container)
Inputs
Outputs
Configurations
Scripting
filter: low pass (field)
Inputs
Outputs
Configurations
Scripting
filter: low pass (fields container)
Inputs
Outputs
Configurations
Scripting
filter: band pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: band pass (timefreq)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (timescoping)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (fields container)
Inputs
Outputs
Configurations
Scripting
serialization: csv to field
Inputs
Outputs
Configurations
Scripting
result: members in compression not certified
Inputs
Outputs
Configurations
Scripting
result: members in bending not certified
Inputs
Outputs
Configurations
Scripting
result: members in linear compression bending not certified
Inputs
Outputs
Configurations
Scripting
invariant: convertnum nod to bcs
Inputs
Outputs
Configurations
Scripting
geo: rotate
Inputs
Outputs
Configurations
Scripting
logic: enrich materials
Inputs
Outputs
Configurations
Scripting
serialization: data tree to json
Inputs
Outputs
Configurations
Scripting
serialization: data tree to txt
Inputs
Outputs
Configurations
Scripting
serialization: json to data tree
Inputs
Outputs
Configurations
Scripting
averaging: nodal difference (fields container)
Inputs
Outputs
Configurations
Scripting
logic: descending sort
Inputs
Outputs
Configurations
Scripting
logic: ascending sort (fields container)
Inputs
Outputs
Configurations
Scripting
logic: descending sort (fields container)
Inputs
Outputs
Configurations
Scripting
serialization: import symbolic workflow
Inputs
Outputs
Configurations
Scripting
filter: filtering max over time workflow
Inputs
Outputs
Configurations
Scripting
metadata: integrate over time freq
Inputs
Outputs
Configurations
Scripting
averaging: nodal difference (field)
Inputs
Outputs
Configurations
Scripting
result: compute stress YZ
Inputs
Outputs
Configurations
Scripting
logic: splitter::data_sources
Inputs
Outputs
Configurations
Scripting
averaging: to elemental nodal (fields container)
Inputs
Outputs
Configurations
Scripting
server: grpc start server
Inputs
Outputs
Configurations
Scripting
result: compute stress XY
Inputs
Outputs
Configurations
Scripting
utility: operator id
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal (field)
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: elemental to nodal (field)
Configuring operators
- If the determinant of the I matrix is zero, switch to an inverse distance weighted average.
- If not, compute the Frink weights and apply the Holmes' weight clip.
- If the clipping produces a large overshoot, an inverse volume weighted average is used.
- 3. For a face finite volume mesh, an inverse distance weighted average is used.">Inputs
Outputs
Configurations
Scripting
averaging: to nodal (field)
Inputs
Outputs
Configurations
Scripting
averaging: to nodal (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: elemental mean (field)
Inputs
Outputs
Configurations
Scripting
averaging: elemental mean (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: nodal to elemental (field)
Inputs
Outputs
Configurations
Scripting
averaging: nodal to elemental (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: nodal to elemental nodal (field)
Inputs
Outputs
Configurations
Scripting
averaging: nodal to elemental nodal (fields container)
Inputs
Outputs
Configurations
Scripting
invariant: eigen values (field)
Inputs
Outputs
Configurations
Scripting
invariant: principal invariants (field)
Inputs
Outputs
Configurations
Scripting
invariant: von mises eqv (fields container)
Inputs
Outputs
Configurations
Scripting
invariant: segalman von mises eqv (fields container)
Inputs
Outputs
Configurations
Scripting
scoping: compute element centroids
Inputs
Outputs
Configurations
Scripting
metadata: cyclic mesh expansion
Inputs
Outputs
Configurations
Scripting
result: cyclic analytic stress eqv max
Inputs
Outputs
Configurations
Scripting
result: remove rigid body motion (fields container)
Inputs
Outputs
Configurations
Scripting
result: cyclic expansion
Inputs
Outputs
Configurations
Scripting
averaging: nodal fraction (fields container)
Inputs
Outputs
Configurations
Scripting
result: recombine cyclic harmonic indices
Inputs
Outputs
Configurations
Scripting
mapping: on coordinates
Inputs
Outputs
Configurations
Scripting
mapping: scoping on coordinates
Inputs
Outputs
Configurations
Scripting
filter: abc weightings
Inputs
Outputs
Configurations
Scripting
mapping: solid to skin
Inputs
Outputs
Configurations
Scripting
mapping: solid to skin (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: elemental difference (field)
Inputs
Outputs
Configurations
Scripting
averaging: elemental fraction (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: extend to mid nodes (fields container)
Inputs
Outputs
Configurations
Scripting
geo: rotate cylindrical coordinates
Inputs
Outputs
Configurations
Scripting
geo: rotate in cylindrical coordinates (fields container)
Inputs
Outputs
Configurations
Scripting
geo: spherical to cartesian coordinates (fields container)
Inputs
Outputs
Configurations
Scripting
geo: spherical to cartesian coordinates
Inputs
Outputs
Configurations
Scripting
mesh: change cs (meshes)
Inputs
Outputs
Configurations
Scripting
geo: normals provider nl (nodes, faces, or elements)
Inputs
Outputs
Configurations
Scripting
geo: elements volumes over time
Inputs
Outputs
Configurations
Scripting
math: window bartlett
Inputs
Outputs
Configurations
Scripting
mesh: from scoping
Inputs
Outputs
Configurations
Scripting
mesh: split field wrt mesh regions
Inputs
Outputs
Configurations
Scripting
result: torque
Inputs
Outputs
Configurations
Scripting
result: euler load buckling
Inputs
Outputs
Configurations
Scripting
geo: faces area
Inputs
Outputs
Configurations
Scripting
result: compute stress 3
Inputs
Outputs
Configurations
Scripting
geo: gauss to node (field)
Inputs
Outputs
Configurations
Scripting
averaging: gauss to node (fields container)
Inputs
Outputs
Configurations
Scripting
math: correlation
Inputs
Outputs
Configurations
Scripting
math: mac
Inputs
Outputs
Configurations
Scripting
result: workflow energy per component
Inputs
Outputs
Configurations
Scripting
result: add rigid body motion (field)
Inputs
Outputs
Configurations
Scripting
result: split on facet indices
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded temperature
Inputs
Outputs
Configurations
Scripting
result: enf solution to global cs
Inputs
Outputs
Configurations
Scripting
result: cms matrices provider
Inputs
Outputs
Configurations
Scripting
result: rom data provider
Inputs
Outputs
Configurations
Scripting
result: prns to field
Inputs
Outputs
Configurations
Scripting
result: remove rigid body motion (field)
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded displacement
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded acceleration
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded stress
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded el strain
Inputs
Outputs
Configurations
Scripting
result: cms subfile info provider
Inputs
Outputs
Configurations
Scripting
result: cyclic volume
Inputs
Outputs
Configurations
Scripting
result: cyclic nmisc
Inputs
Outputs
Configurations
Scripting
invariant: convertnum operator
Inputs
Outputs
Configurations
Scripting
result: compute total strain XZ
Inputs
Outputs
Configurations
Scripting
averaging: to nodal (field)
Inputs
Outputs
Configurations
Scripting
averaging: to nodal (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: elemental mean (field)
Inputs
Outputs
Configurations
Scripting
averaging: nodal to elemental (field)
Inputs
Outputs
Configurations
Scripting
averaging: nodal to elemental (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: nodal to elemental nodal (field)
Inputs
Outputs
Configurations
Scripting
averaging: nodal to elemental nodal (fields container)
Inputs
Outputs
Configurations
Scripting
invariant: eigen values (field)
Inputs
Outputs
Configurations
Scripting
invariant: principal invariants (field)
Inputs
Outputs
Configurations
Scripting
invariant: von mises eqv (fields container)
Inputs
Outputs
Configurations
Scripting
invariant: segalman von mises eqv (fields container)
Inputs
Outputs
Configurations
Scripting
scoping: compute element centroids
Inputs
Outputs
Configurations
Scripting
metadata: cyclic mesh expansion
Inputs
Outputs
Configurations
Scripting
result: cyclic analytic stress eqv max
Inputs
Outputs
Configurations
Scripting
result: remove rigid body motion (fields container)
Inputs
Outputs
Configurations
Scripting
result: cyclic expansion
Inputs
Outputs
Configurations
Scripting
averaging: nodal fraction (fields container)
Inputs
Outputs
Configurations
Scripting
result: recombine cyclic harmonic indices
Inputs
Outputs
Configurations
Scripting
mapping: on coordinates
Inputs
Outputs
Configurations
Scripting
mapping: scoping on coordinates
Inputs
Outputs
Configurations
Scripting
filter: abc weightings
Inputs
Outputs
Configurations
Scripting
mapping: solid to skin
Inputs
Outputs
Configurations
Scripting
mapping: solid to skin (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: elemental difference (field)
Inputs
Outputs
Configurations
Scripting
averaging: elemental fraction (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: extend to mid nodes (fields container)
Inputs
Outputs
Configurations
Scripting
geo: rotate cylindrical coordinates
Inputs
Outputs
Configurations
Scripting
geo: rotate in cylindrical coordinates (fields container)
Inputs
Outputs
Configurations
Scripting
geo: spherical to cartesian coordinates (fields container)
Inputs
Outputs
Configurations
Scripting
geo: spherical to cartesian coordinates
Inputs
Outputs
Configurations
Scripting
mesh: change cs (meshes)
Inputs
Outputs
Configurations
Scripting
geo: normals provider nl (nodes, faces, or elements)
Inputs
Outputs
Configurations
Scripting
geo: elements volumes over time
Inputs
Outputs
Configurations
Scripting
math: window bartlett
Inputs
Outputs
Configurations
Scripting
mesh: from scoping
Inputs
Outputs
Configurations
Scripting
mesh: split field wrt mesh regions
Inputs
Outputs
Configurations
Scripting
result: torque
Inputs
Outputs
Configurations
Scripting
result: euler load buckling
Inputs
Outputs
Configurations
Scripting
geo: faces area
Inputs
Outputs
Configurations
Scripting
result: compute stress 3
Inputs
Outputs
Configurations
Scripting
geo: gauss to node (field)
Inputs
Outputs
Configurations
Scripting
averaging: gauss to node (fields container)
Inputs
Outputs
Configurations
Scripting
math: correlation
Inputs
Outputs
Configurations
Scripting
math: mac
Inputs
Outputs
Configurations
Scripting
result: workflow energy per component
Inputs
Outputs
Configurations
Scripting
result: add rigid body motion (field)
Inputs
Outputs
Configurations
Scripting
result: split on facet indices
Inputs
Outputs
Configurations
Scripting
result: enf solution to global cs
Inputs
Outputs
Configurations
Scripting
result: cms matrices provider
Inputs
Outputs
Configurations
Scripting
result: rom data provider
Inputs
Outputs
Configurations
Scripting
result: prns to field
Inputs
Outputs
Configurations
Scripting
result: remove rigid body motion (field)
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded displacement
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded acceleration
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded stress
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded el strain
Inputs
Outputs
Configurations
Scripting
result: cms subfile info provider
Inputs
Outputs
Configurations
Scripting
result: cyclic volume
Inputs
Outputs
Configurations
Scripting
result: cyclic nmisc
Inputs
Outputs
Configurations
Scripting
invariant: convertnum operator
Inputs
Outputs
Configurations
Scripting
result: compute total strain XZ
Configuring operators
Only linear analyses are supported without On Demand Expansion.
All coordinates are global coordinates.
Euler Angles need to be included in the database.
- Get the XZ shear component (02 component).">Inputs
Outputs
Configurations
Scripting
result: cms dst table provider
Inputs
Outputs
Configurations
Scripting
result: write motion dfmf file
Inputs
Outputs
Configurations
Scripting
invariant: eigen vectors (on field)
Inputs
Outputs
Configurations
Scripting
result: mapdl material properties
Inputs
Outputs
Configurations
Scripting
result: mapdl_section
Inputs
Outputs
Configurations
Scripting
result: compute invariant terms motion
Inputs
Outputs
Configurations
Scripting
result: split to acmo facet indices
Inputs
Outputs
Configurations
Scripting
result: stress solution to global cs
Inputs
Outputs
Configurations
Scripting
result: elastic strain solution to global cs
Inputs
Outputs
Configurations
Scripting
result: plastic strain to global cs
Inputs
Outputs
Configurations
Scripting
math: qr solve
Inputs
Outputs
Configurations
Scripting
result: von mises strains as mechanical workflow
Inputs
Outputs
Configurations
Scripting
mesh: mesh clipper
Inputs
Outputs
Configurations
Scripting
serialization: migrate to vtk
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded element heat flux
Inputs
Outputs
Configurations
Scripting
mesh: external layer
Inputs
Outputs
Configurations
Scripting
mesh: mesh cutter
Inputs
Outputs
Configurations
Scripting
mesh: mesh plan clipper
Inputs
Outputs
Configurations
Scripting
mesh: mesh_to_graphics
Inputs
Outputs
Configurations
Scripting
mesh: mesh_to_graphics_edges
Inputs
Outputs
Configurations
Scripting
geo: scoping normals
Inputs
Outputs
Configurations
Scripting
mesh: combine levelset
Inputs
Outputs
Configurations
Scripting
mesh: exclude levelset
Inputs
Outputs
Configurations
Scripting
mesh: make plane levelset
Inputs
Outputs
Configurations
Scripting
mesh: make sphere levelset
Inputs
Outputs
Configurations
Scripting
mesh: wireframe
Inputs
Outputs
Configurations
Scripting
mesh: mesh to tetra
Inputs
Outputs
Configurations
Scripting
mapping: fft
Inputs
Outputs
Configurations
Scripting
math: fft gradient evaluation
Inputs
Outputs
Configurations
Scripting
math: fft multi harmonic solution minmax
Inputs
Outputs
Configurations
Scripting
math: svd
Inputs
Outputs
Configurations
Scripting
math: time integration
Inputs
Outputs
Configurations
Scripting
math: time derivation
Inputs
Outputs
Configurations
Scripting
mapping: prep sampling fft
Inputs
Outputs
Configurations
Scripting
math: fft filtering and cubic fitting
Inputs
Outputs
Configurations
Scripting
math: window triangular
Inputs
Outputs
Configurations
Scripting
math: window hanning
Inputs
Outputs
Configurations
Scripting
math: window hamming
Inputs
Outputs
Configurations
Scripting
math: window welch
Inputs
Outputs
Configurations
Scripting
math: window blackman
Inputs
Outputs
Configurations
Scripting
math: window triangular (fields container)
Inputs
Outputs
Configurations
Scripting
math: window hanning (fields container)
Inputs
Outputs
Configurations
Scripting
math: window hamming (fields container)
Inputs
Outputs
Configurations
Scripting
math: window welch (fields container)
Inputs
Outputs
Configurations
Scripting
math: window blackman (fields container)
Inputs
Outputs
Configurations
Scripting
math: modal superposition
Inputs
Outputs
Configurations
Scripting
serialization: hdf5dpf generate result file
Inputs
Outputs
Configurations
Scripting
result: migrate to h5dpf
Inputs
Outputs
Configurations
Scripting
result: cgns result provider
Inputs
Outputs
Configurations
Scripting
result: von mises stresses as mechanical workflow
Inputs
Outputs
Configurations
Scripting
utility: hdf5dpf workflow provider
Inputs
Outputs
Configurations
Scripting
other: hdf5dpf mesh property provider
Inputs
Outputs
Configurations
Scripting
serialization: migrate to vtu
Inputs
Outputs
Configurations
Scripting
serialization: vtu export
Inputs
Outputs
Configurations
Scripting
result: compute total strain Y
Inputs
Outputs
Configurations
Scripting
result: cms dst table provider
Inputs
Outputs
Configurations
Scripting
result: write motion dfmf file
Inputs
Outputs
Configurations
Scripting
invariant: eigen vectors (on field)
Inputs
Outputs
Configurations
Scripting
result: mapdl material properties
Inputs
Outputs
Configurations
Scripting
result: mapdl_section
Inputs
Outputs
Configurations
Scripting
result: compute invariant terms motion
Inputs
Outputs
Configurations
Scripting
result: split to acmo facet indices
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded element heat flux
Inputs
Outputs
Configurations
Scripting
result: stress solution to global cs
Inputs
Outputs
Configurations
Scripting
result: elastic strain solution to global cs
Inputs
Outputs
Configurations
Scripting
result: plastic strain to global cs
Inputs
Outputs
Configurations
Scripting
math: qr solve
Inputs
Outputs
Configurations
Scripting
result: von mises strains as mechanical workflow
Inputs
Outputs
Configurations
Scripting
mesh: mesh clipper
Inputs
Outputs
Configurations
Scripting
serialization: migrate to vtk
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded temperature
Inputs
Outputs
Configurations
Scripting
mesh: external layer
Inputs
Outputs
Configurations
Scripting
mesh: mesh cutter
Inputs
Outputs
Configurations
Scripting
mesh: mesh plan clipper
Inputs
Outputs
Configurations
Scripting
mesh: mesh_to_graphics
Inputs
Outputs
Configurations
Scripting
mesh: mesh_to_graphics_edges
Inputs
Outputs
Configurations
Scripting
geo: scoping normals
Inputs
Outputs
Configurations
Scripting
mesh: combine levelset
Inputs
Outputs
Configurations
Scripting
mesh: exclude levelset
Inputs
Outputs
Configurations
Scripting
mesh: make plane levelset
Inputs
Outputs
Configurations
Scripting
mesh: make sphere levelset
Inputs
Outputs
Configurations
Scripting
mesh: wireframe
Inputs
Outputs
Configurations
Scripting
mesh: mesh to tetra
Inputs
Outputs
Configurations
Scripting
mapping: fft
Inputs
Outputs
Configurations
Scripting
math: fft gradient evaluation
Inputs
Outputs
Configurations
Scripting
math: fft multi harmonic solution minmax
Inputs
Outputs
Configurations
Scripting
math: svd
Inputs
Outputs
Configurations
Scripting
math: time integration
Inputs
Outputs
Configurations
Scripting
math: time derivation
Inputs
Outputs
Configurations
Scripting
mapping: prep sampling fft
Inputs
Outputs
Configurations
Scripting
math: fft filtering and cubic fitting
Inputs
Outputs
Configurations
Scripting
math: window triangular
Inputs
Outputs
Configurations
Scripting
math: window hanning
Inputs
Outputs
Configurations
Scripting
math: window hamming
Inputs
Outputs
Configurations
Scripting
math: window welch
Inputs
Outputs
Configurations
Scripting
math: window blackman
Inputs
Outputs
Configurations
Scripting
math: window triangular (fields container)
Inputs
Outputs
Configurations
Scripting
math: window hanning (fields container)
Inputs
Outputs
Configurations
Scripting
math: window hamming (fields container)
Inputs
Outputs
Configurations
Scripting
math: window welch (fields container)
Inputs
Outputs
Configurations
Scripting
math: window blackman (fields container)
Inputs
Outputs
Configurations
Scripting
math: modal superposition
Inputs
Outputs
Configurations
Scripting
serialization: hdf5dpf generate result file
Inputs
Outputs
Configurations
Scripting
result: migrate to h5dpf
Inputs
Outputs
Configurations
Scripting
result: von mises stresses as mechanical workflow
Inputs
Outputs
Configurations
Scripting
utility: hdf5dpf workflow provider
Inputs
Outputs
Configurations
Scripting
other: hdf5dpf mesh property provider
Inputs
Outputs
Configurations
Scripting
serialization: migrate to vtu
Inputs
Outputs
Configurations
Scripting
serialization: vtu export
Inputs
Outputs
Configurations
Scripting
result: compute total strain Y
>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider()
+
+ >>> # Make input connections
+ >>> my_solver_coordinate_system_ids = int()
+ >>> op.inputs.solver_coordinate_system_ids.connect(my_solver_coordinate_system_ids)
+ >>> my_streams = dpf.StreamsContainer()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider(
+ ... solver_coordinate_system_ids=my_solver_coordinate_system_ids,
+ ... streams=my_streams,
+ ... data_sources=my_data_sources,
+ ... )
+
+ >>> # Get output data
+ >>> result_coordinate_system_data1 = op.outputs.coordinate_system_data1()
+ >>> result_coordinate_system_data2 = op.outputs.coordinate_system_data2()
+ """
+
+ def __init__(
+ self,
+ solver_coordinate_system_ids=None,
+ streams=None,
+ data_sources=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(
+ name="coordinate_systems_data_provider", config=config, server=server
+ )
+ self._inputs = InputsCoordinateSystemDataProvider(self)
+ self._outputs = OutputsCoordinateSystemDataProvider(self)
+ if solver_coordinate_system_ids is not None:
+ self.inputs.solver_coordinate_system_ids.connect(
+ solver_coordinate_system_ids
+ )
+ if streams is not None:
+ self.inputs.streams.connect(streams)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+
+ @staticmethod
+ def _spec():
+ description = """Reads coordinate systems data from the result files contained in the
+ streams or data sources."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 1: PinSpecification(
+ name="solver_coordinate_system_ids",
+ type_names=["int32", "vector"],
+ optional=True,
+                    document="""Coordinate system ids to recover, used by the
+                    solver. If not set, all available
+                    coordinate systems are recovered.""",
+ ),
+ 3: PinSpecification(
+ name="streams",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data.""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set.""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="coordinate_system_data1",
+ type_names=["generic_data_container"],
+ optional=False,
+ document="""""",
+ ),
+ 1: PinSpecification(
+ name="coordinate_system_data2",
+ type_names=["generic_data_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(
+ name="coordinate_systems_data_provider", server=server
+ )
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsCoordinateSystemDataProvider
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsCoordinateSystemDataProvider
+ """
+ return super().outputs
+
+
+class InputsCoordinateSystemDataProvider(_Inputs):
+ """Intermediate class used to connect user inputs to
+ coordinate_system_data_provider operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider()
+ >>> my_solver_coordinate_system_ids = int()
+ >>> op.inputs.solver_coordinate_system_ids.connect(my_solver_coordinate_system_ids)
+ >>> my_streams = dpf.StreamsContainer()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(coordinate_system_data_provider._spec().inputs, op)
+ self._solver_coordinate_system_ids = Input(
+ coordinate_system_data_provider._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._solver_coordinate_system_ids)
+ self._streams = Input(
+ coordinate_system_data_provider._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams)
+ self._data_sources = Input(
+ coordinate_system_data_provider._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+
+ @property
+ def solver_coordinate_system_ids(self):
+ """Allows to connect solver_coordinate_system_ids input to the operator.
+
+        Coordinate system ids to recover used by the
+        solver. if not set, all available
+        coordinate systems to be recovered.
+
+ Parameters
+ ----------
+ my_solver_coordinate_system_ids : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider()
+ >>> op.inputs.solver_coordinate_system_ids.connect(my_solver_coordinate_system_ids)
+ >>> # or
+ >>> op.inputs.solver_coordinate_system_ids(my_solver_coordinate_system_ids)
+ """
+ return self._solver_coordinate_system_ids
+
+ @property
+ def streams(self):
+ """Allows to connect streams input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data.
+
+ Parameters
+ ----------
+ my_streams : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> # or
+ >>> op.inputs.streams(my_streams)
+ """
+ return self._streams
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set.
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+
+class OutputsCoordinateSystemDataProvider(_Outputs):
+ """Intermediate class used to get outputs from
+ coordinate_system_data_provider operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_coordinate_system_data1 = op.outputs.coordinate_system_data1()
+ >>> result_coordinate_system_data2 = op.outputs.coordinate_system_data2()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(coordinate_system_data_provider._spec().outputs, op)
+ self._coordinate_system_data1 = Output(
+ coordinate_system_data_provider._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._coordinate_system_data1)
+ self._coordinate_system_data2 = Output(
+ coordinate_system_data_provider._spec().output_pin(1), 1, op
+ )
+ self._outputs.append(self._coordinate_system_data2)
+
+ @property
+ def coordinate_system_data1(self):
+ """Allows to get coordinate_system_data1 output of the operator
+
+ Returns
+ ----------
+ my_coordinate_system_data1 : GenericDataContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_coordinate_system_data1 = op.outputs.coordinate_system_data1()
+ """ # noqa: E501
+ return self._coordinate_system_data1
+
+ @property
+ def coordinate_system_data2(self):
+ """Allows to get coordinate_system_data2 output of the operator
+
+ Returns
+ ----------
+ my_coordinate_system_data2 : GenericDataContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.coordinate_system_data_provider()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_coordinate_system_data2 = op.outputs.coordinate_system_data2()
+ """ # noqa: E501
+ return self._coordinate_system_data2
diff --git a/src/ansys/dpf/core/operators/metadata/element_types_provider.py b/src/ansys/dpf/core/operators/metadata/element_types_provider.py
new file mode 100644
index 0000000000..fd359a7279
--- /dev/null
+++ b/src/ansys/dpf/core/operators/metadata/element_types_provider.py
@@ -0,0 +1,312 @@
+"""
+element_types_provider
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class element_types_provider(Operator):
+ """Reads element types data from the result files contained in the
+ streams or data sources.
+
+ Parameters
+ ----------
+ solver_element_types_ids : int, optional
+ Element type ids to recover used by the
+ solver. if not set, all available
+ element types to be recovered.
+ streams : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data.
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set.
+
+ Returns
+ -------
+ element_types_data : GenericDataContainer
+ The generic_data_container has a class_name:
+ elementtypesproperties. it contains
+ the following property fields:
+ element_routine_number: element
+ routine number. e.g 186 for solid186,
+ keyopts: element type option keys,
+ kdofs: dof/node for this element
+ type.this is a bit mapping, nodelm:
+ number of nodes for this element
+ type, nodfor: number of nodes per
+ element having nodal forces, nodstr:
+ number of nodes per element having
+ nodal stresses, new_gen_element:
+ element of new generation.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.metadata.element_types_provider()
+
+ >>> # Make input connections
+ >>> my_solver_element_types_ids = int()
+ >>> op.inputs.solver_element_types_ids.connect(my_solver_element_types_ids)
+ >>> my_streams = dpf.StreamsContainer()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.metadata.element_types_provider(
+ ... solver_element_types_ids=my_solver_element_types_ids,
+ ... streams=my_streams,
+ ... data_sources=my_data_sources,
+ ... )
+
+ >>> # Get output data
+ >>> result_element_types_data = op.outputs.element_types_data()
+ """
+
+ def __init__(
+ self,
+ solver_element_types_ids=None,
+ streams=None,
+ data_sources=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="element_types_provider", config=config, server=server)
+ self._inputs = InputsElementTypesProvider(self)
+ self._outputs = OutputsElementTypesProvider(self)
+ if solver_element_types_ids is not None:
+ self.inputs.solver_element_types_ids.connect(solver_element_types_ids)
+ if streams is not None:
+ self.inputs.streams.connect(streams)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+
+ @staticmethod
+ def _spec():
+ description = """Reads element types data from the result files contained in the
+ streams or data sources."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 1: PinSpecification(
+ name="solver_element_types_ids",
+ type_names=["int32", "vector"],
+ optional=True,
+ document="""Element type ids to recover used by the
+ solver. if not set, all available
+ element types to be recovered.""",
+ ),
+ 3: PinSpecification(
+ name="streams",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data.""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set.""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="element_types_data",
+ type_names=["generic_data_container"],
+ optional=False,
+ document="""The generic_data_container has a class_name:
+ elementtypesproperties. it contains
+ the following property fields:
+ element_routine_number: element
+ routine number. e.g 186 for solid186,
+ keyopts: element type option keys,
+ kdofs: dof/node for this element
+ type.this is a bit mapping, nodelm:
+ number of nodes for this element
+ type, nodfor: number of nodes per
+ element having nodal forces, nodstr:
+ number of nodes per element having
+ nodal stresses, new_gen_element:
+ element of new generation.""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="element_types_provider", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsElementTypesProvider
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsElementTypesProvider
+ """
+ return super().outputs
+
+
+class InputsElementTypesProvider(_Inputs):
+ """Intermediate class used to connect user inputs to
+ element_types_provider operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.element_types_provider()
+ >>> my_solver_element_types_ids = int()
+ >>> op.inputs.solver_element_types_ids.connect(my_solver_element_types_ids)
+ >>> my_streams = dpf.StreamsContainer()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(element_types_provider._spec().inputs, op)
+ self._solver_element_types_ids = Input(
+ element_types_provider._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._solver_element_types_ids)
+ self._streams = Input(element_types_provider._spec().input_pin(3), 3, op, -1)
+ self._inputs.append(self._streams)
+ self._data_sources = Input(
+ element_types_provider._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+
+ @property
+ def solver_element_types_ids(self):
+ """Allows to connect solver_element_types_ids input to the operator.
+
+ Element type ids to recover used by the
+ solver. if not set, all available
+ element types to be recovered.
+
+ Parameters
+ ----------
+ my_solver_element_types_ids : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.element_types_provider()
+ >>> op.inputs.solver_element_types_ids.connect(my_solver_element_types_ids)
+ >>> # or
+ >>> op.inputs.solver_element_types_ids(my_solver_element_types_ids)
+ """
+ return self._solver_element_types_ids
+
+ @property
+ def streams(self):
+ """Allows to connect streams input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data.
+
+ Parameters
+ ----------
+ my_streams : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.element_types_provider()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> # or
+ >>> op.inputs.streams(my_streams)
+ """
+ return self._streams
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set.
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.element_types_provider()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+
+class OutputsElementTypesProvider(_Outputs):
+ """Intermediate class used to get outputs from
+ element_types_provider operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.element_types_provider()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_element_types_data = op.outputs.element_types_data()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(element_types_provider._spec().outputs, op)
+ self._element_types_data = Output(
+ element_types_provider._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._element_types_data)
+
+ @property
+ def element_types_data(self):
+ """Allows to get element_types_data output of the operator
+
+ Returns
+ ----------
+ my_element_types_data : GenericDataContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.element_types_provider()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_element_types_data = op.outputs.element_types_data()
+ """ # noqa: E501
+ return self._element_types_data
diff --git a/src/ansys/dpf/core/operators/metadata/real_constants_provider.py b/src/ansys/dpf/core/operators/metadata/real_constants_provider.py
new file mode 100644
index 0000000000..78efd6d52c
--- /dev/null
+++ b/src/ansys/dpf/core/operators/metadata/real_constants_provider.py
@@ -0,0 +1,315 @@
+"""
+real_constants_provider
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class real_constants_provider(Operator):
+ """Reads real constants from the result files contained in the streams or
+ data sources.
+
+ Parameters
+ ----------
+ solver_real_constants_ids : int, optional
+ Real constant ids to recover used by the
+ solver. if not set, all available
+ real constants to be recovered.
+ streams : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data.
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set.
+
+ Returns
+ -------
+ real_constants1 : Field
+ real_constants2 : Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.metadata.real_constants_provider()
+
+ >>> # Make input connections
+ >>> my_solver_real_constants_ids = int()
+ >>> op.inputs.solver_real_constants_ids.connect(my_solver_real_constants_ids)
+ >>> my_streams = dpf.StreamsContainer()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.metadata.real_constants_provider(
+ ... solver_real_constants_ids=my_solver_real_constants_ids,
+ ... streams=my_streams,
+ ... data_sources=my_data_sources,
+ ... )
+
+ >>> # Get output data
+ >>> result_real_constants1 = op.outputs.real_constants1()
+ >>> result_real_constants2 = op.outputs.real_constants2()
+ """
+
+ def __init__(
+ self,
+ solver_real_constants_ids=None,
+ streams=None,
+ data_sources=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="real_constants_provider", config=config, server=server)
+ self._inputs = InputsRealConstantsProvider(self)
+ self._outputs = OutputsRealConstantsProvider(self)
+ if solver_real_constants_ids is not None:
+ self.inputs.solver_real_constants_ids.connect(solver_real_constants_ids)
+ if streams is not None:
+ self.inputs.streams.connect(streams)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+
+ @staticmethod
+ def _spec():
+ description = """Reads real constants from the result files contained in the streams or
+ data sources."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 1: PinSpecification(
+ name="solver_real_constants_ids",
+ type_names=["int32", "vector"],
+ optional=True,
+ document="""Real constant ids to recover used by the
+ solver. if not set, all available
+ real constants to be recovered.""",
+ ),
+ 3: PinSpecification(
+ name="streams",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data.""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set.""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="real_constants1",
+ type_names=["field"],
+ optional=False,
+ document="""""",
+ ),
+ 1: PinSpecification(
+ name="real_constants2",
+ type_names=["field"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="real_constants_provider", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsRealConstantsProvider
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsRealConstantsProvider
+ """
+ return super().outputs
+
+
+class InputsRealConstantsProvider(_Inputs):
+ """Intermediate class used to connect user inputs to
+ real_constants_provider operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.real_constants_provider()
+ >>> my_solver_real_constants_ids = int()
+ >>> op.inputs.solver_real_constants_ids.connect(my_solver_real_constants_ids)
+ >>> my_streams = dpf.StreamsContainer()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(real_constants_provider._spec().inputs, op)
+ self._solver_real_constants_ids = Input(
+ real_constants_provider._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._solver_real_constants_ids)
+ self._streams = Input(real_constants_provider._spec().input_pin(3), 3, op, -1)
+ self._inputs.append(self._streams)
+ self._data_sources = Input(
+ real_constants_provider._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+
+ @property
+ def solver_real_constants_ids(self):
+ """Allows to connect solver_real_constants_ids input to the operator.
+
+ Real constant ids to recover used by the
+ solver. if not set, all available
+ real constants to be recovered.
+
+ Parameters
+ ----------
+ my_solver_real_constants_ids : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.real_constants_provider()
+ >>> op.inputs.solver_real_constants_ids.connect(my_solver_real_constants_ids)
+ >>> # or
+ >>> op.inputs.solver_real_constants_ids(my_solver_real_constants_ids)
+ """
+ return self._solver_real_constants_ids
+
+ @property
+ def streams(self):
+ """Allows to connect streams input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data.
+
+ Parameters
+ ----------
+ my_streams : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.real_constants_provider()
+ >>> op.inputs.streams.connect(my_streams)
+ >>> # or
+ >>> op.inputs.streams(my_streams)
+ """
+ return self._streams
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set.
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.real_constants_provider()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+
+class OutputsRealConstantsProvider(_Outputs):
+ """Intermediate class used to get outputs from
+ real_constants_provider operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.real_constants_provider()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_real_constants1 = op.outputs.real_constants1()
+ >>> result_real_constants2 = op.outputs.real_constants2()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(real_constants_provider._spec().outputs, op)
+ self._real_constants1 = Output(
+ real_constants_provider._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._real_constants1)
+ self._real_constants2 = Output(
+ real_constants_provider._spec().output_pin(1), 1, op
+ )
+ self._outputs.append(self._real_constants2)
+
+ @property
+ def real_constants1(self):
+ """Allows to get real_constants1 output of the operator
+
+ Returns
+ ----------
+ my_real_constants1 : Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.real_constants_provider()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_real_constants1 = op.outputs.real_constants1()
+ """ # noqa: E501
+ return self._real_constants1
+
+ @property
+ def real_constants2(self):
+ """Allows to get real_constants2 output of the operator
+
+ Returns
+ ----------
+ my_real_constants2 : Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.metadata.real_constants_provider()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_real_constants2 = op.outputs.real_constants2()
+ """ # noqa: E501
+ return self._real_constants2
diff --git a/src/ansys/dpf/core/operators/result/__init__.py b/src/ansys/dpf/core/operators/result/__init__.py
index 05261fd0f8..7ea819543e 100644
--- a/src/ansys/dpf/core/operators/result/__init__.py
+++ b/src/ansys/dpf/core/operators/result/__init__.py
@@ -99,7 +99,13 @@
from .elastic_strain_YZ import elastic_strain_YZ
from .elastic_strain_Z import elastic_strain_Z
from .electric_field import electric_field
+from .electric_field_X import electric_field_X
+from .electric_field_Y import electric_field_Y
+from .electric_field_Z import electric_field_Z
from .electric_flux_density import electric_flux_density
+from .electric_flux_density_X import electric_flux_density_X
+from .electric_flux_density_Y import electric_flux_density_Y
+from .electric_flux_density_Z import electric_flux_density_Z
from .electric_potential import electric_potential
from .elemental_heat_generation import elemental_heat_generation
from .elemental_mass import elemental_mass
@@ -181,7 +187,13 @@
from .kinetic_energy import kinetic_energy
from .mach_number import mach_number
from .magnetic_field import magnetic_field
+from .magnetic_field_X import magnetic_field_X
+from .magnetic_field_Y import magnetic_field_Y
+from .magnetic_field_Z import magnetic_field_Z
from .magnetic_flux_density import magnetic_flux_density
+from .magnetic_flux_density_X import magnetic_flux_density_X
+from .magnetic_flux_density_Y import magnetic_flux_density_Y
+from .magnetic_flux_density_Z import magnetic_flux_density_Z
from .magnetic_scalar_potential import magnetic_scalar_potential
from .magnetic_vector_potential import magnetic_vector_potential
from .mapdl_material_properties import mapdl_material_properties
@@ -285,6 +297,9 @@
from .tangential_contact_moment import tangential_contact_moment
from .temperature import temperature
from .temperature_grad import temperature_grad
+from .temperature_grad_X import temperature_grad_X
+from .temperature_grad_Y import temperature_grad_Y
+from .temperature_grad_Z import temperature_grad_Z
from .thermal_conductivity import thermal_conductivity
from .thermal_dissipation_energy import thermal_dissipation_energy
from .thermal_strain import thermal_strain
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_heat_flux.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_heat_flux.py
index 036f09a13c..f6ecda5a51 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_heat_flux.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_heat_flux.py
@@ -12,7 +12,7 @@
class cyclic_expanded_heat_flux(Operator):
- """Read mapdl::rth::TF from an rst file and expand it with cyclic
+ """Read mapdl::rst::TF from an rst file and expand it with cyclic
symmetry.
Parameters
@@ -138,7 +138,7 @@ def __init__(
config=None,
server=None,
):
- super().__init__(name="mapdl::rth::TF_cyclic", config=config, server=server)
+ super().__init__(name="mapdl::rst::TF_cyclic", config=config, server=server)
self._inputs = InputsCyclicExpandedHeatFlux(self)
self._outputs = OutputsCyclicExpandedHeatFlux(self)
if time_scoping is not None:
@@ -172,7 +172,7 @@ def __init__(
@staticmethod
def _spec():
- description = """Read mapdl::rth::TF from an rst file and expand it with cyclic
+ description = """Read mapdl::rst::TF from an rst file and expand it with cyclic
symmetry."""
spec = Specification(
description=description,
@@ -303,7 +303,7 @@ def default_config(server=None):
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
"""
- return Operator.default_config(name="mapdl::rth::TF_cyclic", server=server)
+ return Operator.default_config(name="mapdl::rst::TF_cyclic", server=server)
@property
def inputs(self):
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_temperature.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_temperature.py
index eca446d971..efe19cbf06 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_temperature.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_temperature.py
@@ -137,7 +137,7 @@ def __init__(
config=None,
server=None,
):
- super().__init__(name="mapdl::rth::TEMP_cyclic", config=config, server=server)
+ super().__init__(name="mapdl::rst::TEMP_cyclic", config=config, server=server)
self._inputs = InputsCyclicExpandedTemperature(self)
self._outputs = OutputsCyclicExpandedTemperature(self)
if time_scoping is not None:
@@ -303,7 +303,7 @@ def default_config(server=None):
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
"""
- return Operator.default_config(name="mapdl::rth::TEMP_cyclic", server=server)
+ return Operator.default_config(name="mapdl::rst::TEMP_cyclic", server=server)
@property
def inputs(self):
diff --git a/src/ansys/dpf/core/operators/result/electric_field_X.py b/src/ansys/dpf/core/operators/result/electric_field_X.py
new file mode 100644
index 0000000000..48baca6fa5
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/electric_field_X.py
@@ -0,0 +1,653 @@
+"""
+electric_field_X
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class electric_field_X(Operator):
+ """Read/compute electric field X component of the vector (1st component)
+ by calling the readers defined by the datasources. Regarding the
+ requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.electric_field_X()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.electric_field_X(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="EFX", config=config, server=server)
+ self._inputs = InputsElectricFieldX(self)
+ self._outputs = OutputsElectricFieldX(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute electric field X component of the vector (1st component)
+ by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh
+ scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="EFX", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsElectricFieldX
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsElectricFieldX
+ """
+ return super().outputs
+
+
+class InputsElectricFieldX(_Inputs):
+ """Intermediate class used to connect user inputs to
+ electric_field_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_field_X._spec().inputs, op)
+ self._time_scoping = Input(electric_field_X._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(electric_field_X._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(electric_field_X._spec().input_pin(2), 2, op, -1)
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ electric_field_X._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(electric_field_X._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ electric_field_X._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(electric_field_X._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ electric_field_X._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(electric_field_X._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(electric_field_X._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsElectricFieldX(_Outputs):
+ """Intermediate class used to get outputs from
+ electric_field_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_field_X._spec().outputs, op)
+ self._fields_container = Output(electric_field_X._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/electric_field_Y.py b/src/ansys/dpf/core/operators/result/electric_field_Y.py
new file mode 100644
index 0000000000..b501ff24fa
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/electric_field_Y.py
@@ -0,0 +1,653 @@
+"""
+electric_field_Y
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class electric_field_Y(Operator):
+ """Read/compute electric field Y component of the vector (2nd component)
+ by calling the readers defined by the datasources. Regarding the
+ requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.electric_field_Y()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.electric_field_Y(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="EFY", config=config, server=server)
+ self._inputs = InputsElectricFieldY(self)
+ self._outputs = OutputsElectricFieldY(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute electric field Y component of the vector (2nd component)
+ by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh
+ scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="EFY", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsElectricFieldY
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsElectricFieldY
+ """
+ return super().outputs
+
+
+class InputsElectricFieldY(_Inputs):
+ """Intermediate class used to connect user inputs to
+ electric_field_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_field_Y._spec().inputs, op)
+ self._time_scoping = Input(electric_field_Y._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(electric_field_Y._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(electric_field_Y._spec().input_pin(2), 2, op, -1)
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ electric_field_Y._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(electric_field_Y._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ electric_field_Y._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(electric_field_Y._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ electric_field_Y._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(electric_field_Y._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(electric_field_Y._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsElectricFieldY(_Outputs):
+ """Intermediate class used to get outputs from
+ electric_field_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_field_Y._spec().outputs, op)
+ self._fields_container = Output(electric_field_Y._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/electric_field_Z.py b/src/ansys/dpf/core/operators/result/electric_field_Z.py
new file mode 100644
index 0000000000..0dbc2c6e9f
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/electric_field_Z.py
@@ -0,0 +1,653 @@
+"""
+electric_field_Z
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class electric_field_Z(Operator):
+ """Read/compute electric field Z component of the vector (3rd component)
+ by calling the readers defined by the datasources. Regarding the
+ requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.electric_field_Z()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.electric_field_Z(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="EFZ", config=config, server=server)
+ self._inputs = InputsElectricFieldZ(self)
+ self._outputs = OutputsElectricFieldZ(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute electric field Z component of the vector (3rd component)
+ by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh
+ scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="EFZ", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsElectricFieldZ
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsElectricFieldZ
+ """
+ return super().outputs
+
+
+class InputsElectricFieldZ(_Inputs):
+ """Intermediate class used to connect user inputs to
+ electric_field_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_field_Z._spec().inputs, op)
+ self._time_scoping = Input(electric_field_Z._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(electric_field_Z._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(electric_field_Z._spec().input_pin(2), 2, op, -1)
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ electric_field_Z._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(electric_field_Z._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ electric_field_Z._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(electric_field_Z._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ electric_field_Z._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(electric_field_Z._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(electric_field_Z._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsElectricFieldZ(_Outputs):
+ """Intermediate class used to get outputs from
+ electric_field_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_field_Z._spec().outputs, op)
+ self._fields_container = Output(electric_field_Z._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_field_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_X.py b/src/ansys/dpf/core/operators/result/electric_flux_density_X.py
new file mode 100644
index 0000000000..ecd132ad55
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/electric_flux_density_X.py
@@ -0,0 +1,667 @@
+"""
+electric_flux_density_X
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class electric_flux_density_X(Operator):
+ """Read/compute Electric flux density X component of the vector (1st
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.electric_flux_density_X()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.electric_flux_density_X(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="EFDX", config=config, server=server)
+ self._inputs = InputsElectricFluxDensityX(self)
+ self._outputs = OutputsElectricFluxDensityX(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Electric flux density X component of the vector (1st
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="EFDX", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsElectricFluxDensityX
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsElectricFluxDensityX
+ """
+ return super().outputs
+
+
+class InputsElectricFluxDensityX(_Inputs):
+ """Intermediate class used to connect user inputs to
+ electric_flux_density_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_flux_density_X._spec().inputs, op)
+ self._time_scoping = Input(
+ electric_flux_density_X._spec().input_pin(0), 0, op, -1
+ )
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(
+ electric_flux_density_X._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ electric_flux_density_X._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ electric_flux_density_X._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(
+ electric_flux_density_X._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ electric_flux_density_X._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(electric_flux_density_X._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ electric_flux_density_X._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(
+ electric_flux_density_X._spec().input_pin(14), 14, op, -1
+ )
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(
+ electric_flux_density_X._spec().input_pin(22), 22, op, -1
+ )
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsElectricFluxDensityX(_Outputs):
+ """Intermediate class used to get outputs from
+ electric_flux_density_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_flux_density_X._spec().outputs, op)
+ self._fields_container = Output(
+ electric_flux_density_X._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py b/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py
new file mode 100644
index 0000000000..4d0c69a5b0
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py
@@ -0,0 +1,667 @@
+"""
+electric_flux_density_Y
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class electric_flux_density_Y(Operator):
+ """Read/compute Electric flux density Y component of the vector (2nd
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.electric_flux_density_Y(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="EFDY", config=config, server=server)
+ self._inputs = InputsElectricFluxDensityY(self)
+ self._outputs = OutputsElectricFluxDensityY(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Electric flux density Y component of the vector (2nd
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="EFDY", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsElectricFluxDensityY
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsElectricFluxDensityY
+ """
+ return super().outputs
+
+
+class InputsElectricFluxDensityY(_Inputs):
+ """Intermediate class used to connect user inputs to
+ electric_flux_density_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_flux_density_Y._spec().inputs, op)
+ self._time_scoping = Input(
+ electric_flux_density_Y._spec().input_pin(0), 0, op, -1
+ )
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(
+ electric_flux_density_Y._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ electric_flux_density_Y._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ electric_flux_density_Y._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(
+ electric_flux_density_Y._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ electric_flux_density_Y._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(electric_flux_density_Y._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ electric_flux_density_Y._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(
+ electric_flux_density_Y._spec().input_pin(14), 14, op, -1
+ )
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(
+ electric_flux_density_Y._spec().input_pin(22), 22, op, -1
+ )
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsElectricFluxDensityY(_Outputs):
+ """Intermediate class used to get outputs from
+ electric_flux_density_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_flux_density_Y._spec().outputs, op)
+ self._fields_container = Output(
+ electric_flux_density_Y._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py b/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py
new file mode 100644
index 0000000000..0e0be41756
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py
@@ -0,0 +1,667 @@
+"""
+electric_flux_density_Z
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class electric_flux_density_Z(Operator):
+ """Read/compute Electric flux density Z component of the vector (3rd
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.electric_flux_density_Z(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="EFDZ", config=config, server=server)
+ self._inputs = InputsElectricFluxDensityZ(self)
+ self._outputs = OutputsElectricFluxDensityZ(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Electric flux density Z component of the vector (3rd
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="EFDZ", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsElectricFluxDensityZ
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsElectricFluxDensityZ
+ """
+ return super().outputs
+
+
+class InputsElectricFluxDensityZ(_Inputs):
+ """Intermediate class used to connect user inputs to
+ electric_flux_density_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_flux_density_Z._spec().inputs, op)
+ self._time_scoping = Input(
+ electric_flux_density_Z._spec().input_pin(0), 0, op, -1
+ )
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(
+ electric_flux_density_Z._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ electric_flux_density_Z._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ electric_flux_density_Z._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(
+ electric_flux_density_Z._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ electric_flux_density_Z._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(electric_flux_density_Z._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ electric_flux_density_Z._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(
+ electric_flux_density_Z._spec().input_pin(14), 14, op, -1
+ )
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(
+ electric_flux_density_Z._spec().input_pin(22), 22, op, -1
+ )
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsElectricFluxDensityZ(_Outputs):
+ """Intermediate class used to get outputs from
+ electric_flux_density_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(electric_flux_density_Z._spec().outputs, op)
+ self._fields_container = Output(
+ electric_flux_density_Z._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.electric_flux_density_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/magnetic_field_X.py b/src/ansys/dpf/core/operators/result/magnetic_field_X.py
new file mode 100644
index 0000000000..97131fb60f
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/magnetic_field_X.py
@@ -0,0 +1,653 @@
+"""
+magnetic_field_X
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class magnetic_field_X(Operator):
+ """Read/compute Magnetic Field X component of the vector (1st component)
+ by calling the readers defined by the datasources. Regarding the
+ requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.magnetic_field_X()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.magnetic_field_X(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="MFX", config=config, server=server)
+ self._inputs = InputsMagneticFieldX(self)
+ self._outputs = OutputsMagneticFieldX(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Magnetic Field X component of the vector (1st component)
+ by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh
+ scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="MFX", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsMagneticFieldX
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsMagneticFieldX
+ """
+ return super().outputs
+
+
+class InputsMagneticFieldX(_Inputs):
+ """Intermediate class used to connect user inputs to
+ magnetic_field_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_field_X._spec().inputs, op)
+ self._time_scoping = Input(magnetic_field_X._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(magnetic_field_X._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(magnetic_field_X._spec().input_pin(2), 2, op, -1)
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ magnetic_field_X._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(magnetic_field_X._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ magnetic_field_X._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(magnetic_field_X._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ magnetic_field_X._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(magnetic_field_X._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(magnetic_field_X._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsMagneticFieldX(_Outputs):
+ """Intermediate class used to get outputs from
+ magnetic_field_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_field_X._spec().outputs, op)
+ self._fields_container = Output(magnetic_field_X._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/magnetic_field_Y.py b/src/ansys/dpf/core/operators/result/magnetic_field_Y.py
new file mode 100644
index 0000000000..76f61b74c1
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/magnetic_field_Y.py
@@ -0,0 +1,653 @@
+"""
+magnetic_field_Y
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class magnetic_field_Y(Operator):
+ """Read/compute Magnetic Field Y component of the vector (2nd component)
+ by calling the readers defined by the datasources. Regarding the
+ requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.magnetic_field_Y()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.magnetic_field_Y(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="MFY", config=config, server=server)
+ self._inputs = InputsMagneticFieldY(self)
+ self._outputs = OutputsMagneticFieldY(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Magnetic Field Y component of the vector (2nd component)
+ by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh
+ scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="MFY", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsMagneticFieldY
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsMagneticFieldY
+ """
+ return super().outputs
+
+
+class InputsMagneticFieldY(_Inputs):
+ """Intermediate class used to connect user inputs to
+ magnetic_field_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_field_Y._spec().inputs, op)
+ self._time_scoping = Input(magnetic_field_Y._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(magnetic_field_Y._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(magnetic_field_Y._spec().input_pin(2), 2, op, -1)
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ magnetic_field_Y._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(magnetic_field_Y._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ magnetic_field_Y._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(magnetic_field_Y._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ magnetic_field_Y._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(magnetic_field_Y._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(magnetic_field_Y._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsMagneticFieldY(_Outputs):
+ """Intermediate class used to get outputs from
+ magnetic_field_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_field_Y._spec().outputs, op)
+ self._fields_container = Output(magnetic_field_Y._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/magnetic_field_Z.py b/src/ansys/dpf/core/operators/result/magnetic_field_Z.py
new file mode 100644
index 0000000000..996c1ac01c
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/magnetic_field_Z.py
@@ -0,0 +1,653 @@
+"""
+magnetic_field_Z
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class magnetic_field_Z(Operator):
+ """Read/compute Magnetic Field Z component of the vector (3rd component)
+ by calling the readers defined by the datasources. Regarding the
+ requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.magnetic_field_Z()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.magnetic_field_Z(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="MFZ", config=config, server=server)
+ self._inputs = InputsMagneticFieldZ(self)
+ self._outputs = OutputsMagneticFieldZ(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Magnetic Field Z component of the vector (3rd component)
+ by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh
+ scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="MFZ", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsMagneticFieldZ
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsMagneticFieldZ
+ """
+ return super().outputs
+
+
+class InputsMagneticFieldZ(_Inputs):
+ """Intermediate class used to connect user inputs to
+ magnetic_field_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_field_Z._spec().inputs, op)
+ self._time_scoping = Input(magnetic_field_Z._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(magnetic_field_Z._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(magnetic_field_Z._spec().input_pin(2), 2, op, -1)
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ magnetic_field_Z._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(magnetic_field_Z._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ magnetic_field_Z._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(magnetic_field_Z._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ magnetic_field_Z._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(magnetic_field_Z._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(magnetic_field_Z._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsMagneticFieldZ(_Outputs):
+ """Intermediate class used to get outputs from
+ magnetic_field_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_field_Z._spec().outputs, op)
+ self._fields_container = Output(magnetic_field_Z._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_field_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/magnetic_flux_density_X.py b/src/ansys/dpf/core/operators/result/magnetic_flux_density_X.py
new file mode 100644
index 0000000000..481d3ee459
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/magnetic_flux_density_X.py
@@ -0,0 +1,667 @@
+"""
+magnetic_flux_density_X
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class magnetic_flux_density_X(Operator):
+ """Read/compute Magnetic Flux Density X component of the vector (1st
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.magnetic_flux_density_X(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="MFDX", config=config, server=server)
+ self._inputs = InputsMagneticFluxDensityX(self)
+ self._outputs = OutputsMagneticFluxDensityX(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Magnetic Flux Density X component of the vector (1st
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="MFDX", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsMagneticFluxDensityX
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsMagneticFluxDensityX
+ """
+ return super().outputs
+
+
+class InputsMagneticFluxDensityX(_Inputs):
+ """Intermediate class used to connect user inputs to
+ magnetic_flux_density_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_flux_density_X._spec().inputs, op)
+ self._time_scoping = Input(
+ magnetic_flux_density_X._spec().input_pin(0), 0, op, -1
+ )
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(
+ magnetic_flux_density_X._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ magnetic_flux_density_X._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ magnetic_flux_density_X._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(
+ magnetic_flux_density_X._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ magnetic_flux_density_X._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(magnetic_flux_density_X._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ magnetic_flux_density_X._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(
+ magnetic_flux_density_X._spec().input_pin(14), 14, op, -1
+ )
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(
+ magnetic_flux_density_X._spec().input_pin(22), 22, op, -1
+ )
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsMagneticFluxDensityX(_Outputs):
+ """Intermediate class used to get outputs from
+ magnetic_flux_density_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_flux_density_X._spec().outputs, op)
+ self._fields_container = Output(
+ magnetic_flux_density_X._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/magnetic_flux_density_Y.py b/src/ansys/dpf/core/operators/result/magnetic_flux_density_Y.py
new file mode 100644
index 0000000000..b99d0f82e2
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/magnetic_flux_density_Y.py
@@ -0,0 +1,667 @@
+"""
+magnetic_flux_density_Y
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class magnetic_flux_density_Y(Operator):
+ """Read/compute Magnetic Flux Density Y component of the vector (2nd
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.magnetic_flux_density_Y(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="MFDY", config=config, server=server)
+ self._inputs = InputsMagneticFluxDensityY(self)
+ self._outputs = OutputsMagneticFluxDensityY(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Magnetic Flux Density Y component of the vector (2nd
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="MFDY", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsMagneticFluxDensityY
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsMagneticFluxDensityY
+ """
+ return super().outputs
+
+
+class InputsMagneticFluxDensityY(_Inputs):
+ """Intermediate class used to connect user inputs to
+ magnetic_flux_density_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_flux_density_Y._spec().inputs, op)
+ self._time_scoping = Input(
+ magnetic_flux_density_Y._spec().input_pin(0), 0, op, -1
+ )
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(
+ magnetic_flux_density_Y._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ magnetic_flux_density_Y._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ magnetic_flux_density_Y._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(
+ magnetic_flux_density_Y._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ magnetic_flux_density_Y._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(magnetic_flux_density_Y._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ magnetic_flux_density_Y._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(
+ magnetic_flux_density_Y._spec().input_pin(14), 14, op, -1
+ )
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(
+ magnetic_flux_density_Y._spec().input_pin(22), 22, op, -1
+ )
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ interpolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsMagneticFluxDensityY(_Outputs):
+ """Intermediate class used to get outputs from
+ magnetic_flux_density_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_flux_density_Y._spec().outputs, op)
+ self._fields_container = Output(
+ magnetic_flux_density_Y._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/magnetic_flux_density_Z.py b/src/ansys/dpf/core/operators/result/magnetic_flux_density_Z.py
new file mode 100644
index 0000000000..075217ae30
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/magnetic_flux_density_Z.py
@@ -0,0 +1,667 @@
+"""
+magnetic_flux_density_Z
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class magnetic_flux_density_Z(Operator):
+ """Read/compute Magnetic Flux Density Z component of the vector (3rd
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ interpolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.magnetic_flux_density_Z(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="MFDZ", config=config, server=server)
+ self._inputs = InputsMagneticFluxDensityZ(self)
+ self._outputs = OutputsMagneticFluxDensityZ(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Magnetic Flux Density Z component of the vector (3rd
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ interpolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="MFDZ", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsMagneticFluxDensityZ
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsMagneticFluxDensityZ
+ """
+ return super().outputs
+
+
+class InputsMagneticFluxDensityZ(_Inputs):
+ """Intermediate class used to connect user inputs to
+ magnetic_flux_density_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_flux_density_Z._spec().inputs, op)
+ self._time_scoping = Input(
+ magnetic_flux_density_Z._spec().input_pin(0), 0, op, -1
+ )
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(
+ magnetic_flux_density_Z._spec().input_pin(1), 1, op, -1
+ )
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ magnetic_flux_density_Z._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ magnetic_flux_density_Z._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(
+ magnetic_flux_density_Z._spec().input_pin(4), 4, op, -1
+ )
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ magnetic_flux_density_Z._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(magnetic_flux_density_Z._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ magnetic_flux_density_Z._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(
+ magnetic_flux_density_Z._spec().input_pin(14), 14, op, -1
+ )
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(
+ magnetic_flux_density_Z._spec().input_pin(22), 22, op, -1
+ )
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ interpolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsMagneticFluxDensityZ(_Outputs):
+ """Intermediate class used to get outputs from
+ magnetic_flux_density_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(magnetic_flux_density_Z._spec().outputs, op)
+ self._fields_container = Output(
+ magnetic_flux_density_Z._spec().output_pin(0), 0, op
+ )
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.magnetic_flux_density_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/temperature_grad_X.py b/src/ansys/dpf/core/operators/result/temperature_grad_X.py
new file mode 100644
index 0000000000..cd778e776f
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/temperature_grad_X.py
@@ -0,0 +1,655 @@
+"""
+temperature_grad_X
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class temperature_grad_X(Operator):
+ """Read/compute Temperature Gradient X component of the vector (1st
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.temperature_grad_X()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.temperature_grad_X(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="TGX", config=config, server=server)
+ self._inputs = InputsTemperatureGradX(self)
+ self._outputs = OutputsTemperatureGradX(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Temperature Gradient X component of the vector (1st
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="TGX", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsTemperatureGradX
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsTemperatureGradX
+ """
+ return super().outputs
+
+
+class InputsTemperatureGradX(_Inputs):
+ """Intermediate class used to connect user inputs to
+ temperature_grad_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(temperature_grad_X._spec().inputs, op)
+ self._time_scoping = Input(temperature_grad_X._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(temperature_grad_X._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ temperature_grad_X._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ temperature_grad_X._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(temperature_grad_X._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ temperature_grad_X._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(temperature_grad_X._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ temperature_grad_X._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(temperature_grad_X._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(temperature_grad_X._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsTemperatureGradX(_Outputs):
+ """Intermediate class used to get outputs from
+ temperature_grad_X operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(temperature_grad_X._spec().outputs, op)
+ self._fields_container = Output(temperature_grad_X._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_X()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/temperature_grad_Y.py b/src/ansys/dpf/core/operators/result/temperature_grad_Y.py
new file mode 100644
index 0000000000..d1affd4fd2
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/temperature_grad_Y.py
@@ -0,0 +1,655 @@
+"""
+temperature_grad_Y
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class temperature_grad_Y(Operator):
+ """Read/compute Temperature Gradient Y component of the vector (2nd
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.temperature_grad_Y()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.temperature_grad_Y(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="TGY", config=config, server=server)
+ self._inputs = InputsTemperatureGradY(self)
+ self._outputs = OutputsTemperatureGradY(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Temperature Gradient Y component of the vector (2nd
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="TGY", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsTemperatureGradY
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsTemperatureGradY
+ """
+ return super().outputs
+
+
+class InputsTemperatureGradY(_Inputs):
+ """Intermediate class used to connect user inputs to
+ temperature_grad_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(temperature_grad_Y._spec().inputs, op)
+ self._time_scoping = Input(temperature_grad_Y._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(temperature_grad_Y._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ temperature_grad_Y._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ temperature_grad_Y._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(temperature_grad_Y._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ temperature_grad_Y._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(temperature_grad_Y._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ temperature_grad_Y._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(temperature_grad_Y._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(temperature_grad_Y._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsTemperatureGradY(_Outputs):
+ """Intermediate class used to get outputs from
+ temperature_grad_Y operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(temperature_grad_Y._spec().outputs, op)
+ self._fields_container = Output(temperature_grad_Y._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Y()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/temperature_grad_Z.py b/src/ansys/dpf/core/operators/result/temperature_grad_Z.py
new file mode 100644
index 0000000000..76961e4763
--- /dev/null
+++ b/src/ansys/dpf/core/operators/result/temperature_grad_Z.py
@@ -0,0 +1,655 @@
+"""
+temperature_grad_Z
+
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class temperature_grad_Z(Operator):
+ """Read/compute Temperature Gradient Z component of the vector (3rd
+ component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the
+ result location can be Nodal/ElementalNodal/Elemental.
+
+ Parameters
+ ----------
+ time_scoping : Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+ mesh_scoping : ScopingsContainer or Scoping, optional
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+ fields_container : FieldsContainer, optional
+ Fieldscontainer already allocated modified
+ inplace
+ streams_container : StreamsContainer, optional
+ Result file container allowed to be kept open
+ to cache data
+ data_sources : DataSources
+ Result file path container, used if no
+ streams are set
+ bool_rotate_to_global : bool, optional
+ If true the field is rotated to global
+ coordinate system (default true)
+ mesh : MeshedRegion or MeshesContainer, optional
+ Prevents from reading the mesh in the result
+ files
+ requested_location : str, optional
+ Requested location, default is nodal
+ read_cyclic : int, optional
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+ read_beams : bool, optional
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Returns
+ -------
+ fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.result.temperature_grad_Z()
+
+ >>> # Make input connections
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.result.temperature_grad_Z(
+ ... time_scoping=my_time_scoping,
+ ... mesh_scoping=my_mesh_scoping,
+ ... fields_container=my_fields_container,
+ ... streams_container=my_streams_container,
+ ... data_sources=my_data_sources,
+ ... bool_rotate_to_global=my_bool_rotate_to_global,
+ ... mesh=my_mesh,
+ ... requested_location=my_requested_location,
+ ... read_cyclic=my_read_cyclic,
+ ... read_beams=my_read_beams,
+ ... )
+
+ >>> # Get output data
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(
+ self,
+ time_scoping=None,
+ mesh_scoping=None,
+ fields_container=None,
+ streams_container=None,
+ data_sources=None,
+ bool_rotate_to_global=None,
+ mesh=None,
+ requested_location=None,
+ read_cyclic=None,
+ read_beams=None,
+ config=None,
+ server=None,
+ ):
+ super().__init__(name="TGZ", config=config, server=server)
+ self._inputs = InputsTemperatureGradZ(self)
+ self._outputs = OutputsTemperatureGradZ(self)
+ if time_scoping is not None:
+ self.inputs.time_scoping.connect(time_scoping)
+ if mesh_scoping is not None:
+ self.inputs.mesh_scoping.connect(mesh_scoping)
+ if fields_container is not None:
+ self.inputs.fields_container.connect(fields_container)
+ if streams_container is not None:
+ self.inputs.streams_container.connect(streams_container)
+ if data_sources is not None:
+ self.inputs.data_sources.connect(data_sources)
+ if bool_rotate_to_global is not None:
+ self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+ if requested_location is not None:
+ self.inputs.requested_location.connect(requested_location)
+ if read_cyclic is not None:
+ self.inputs.read_cyclic.connect(read_cyclic)
+ if read_beams is not None:
+ self.inputs.read_beams.connect(read_beams)
+
+ @staticmethod
+ def _spec():
+ description = """Read/compute Temperature Gradient Z component of the vector (3rd
+ component) by calling the readers defined by the
+ datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="time_scoping",
+ type_names=[
+ "scoping",
+ "int32",
+ "vector",
+ "double",
+ "field",
+ "vector",
+ ],
+ optional=True,
+ document="""Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.""",
+ ),
+ 1: PinSpecification(
+ name="mesh_scoping",
+ type_names=["scopings_container", "scoping"],
+ optional=True,
+ document="""Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains""",
+ ),
+ 2: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=True,
+ document="""Fieldscontainer already allocated modified
+ inplace""",
+ ),
+ 3: PinSpecification(
+ name="streams_container",
+ type_names=["streams_container"],
+ optional=True,
+ document="""Result file container allowed to be kept open
+ to cache data""",
+ ),
+ 4: PinSpecification(
+ name="data_sources",
+ type_names=["data_sources"],
+ optional=False,
+ document="""Result file path container, used if no
+ streams are set""",
+ ),
+ 5: PinSpecification(
+ name="bool_rotate_to_global",
+ type_names=["bool"],
+ optional=True,
+ document="""If true the field is rotated to global
+ coordinate system (default true)""",
+ ),
+ 7: PinSpecification(
+ name="mesh",
+ type_names=["abstract_meshed_region", "meshes_container"],
+ optional=True,
+ document="""Prevents from reading the mesh in the result
+ files""",
+ ),
+ 9: PinSpecification(
+ name="requested_location",
+ type_names=["string"],
+ optional=True,
+ document="""Requested location, default is nodal""",
+ ),
+ 14: PinSpecification(
+ name="read_cyclic",
+ type_names=["enum dataProcessing::ECyclicReading", "int32"],
+ optional=True,
+ document="""If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)""",
+ ),
+ 22: PinSpecification(
+ name="read_beams",
+ type_names=["bool"],
+ optional=True,
+ document="""Elemental nodal beam results are read if this
+ pin is set to true (default is false)""",
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="fields_container",
+ type_names=["fields_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(name="TGZ", server=server)
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsTemperatureGradZ
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsTemperatureGradZ
+ """
+ return super().outputs
+
+
+class InputsTemperatureGradZ(_Inputs):
+ """Intermediate class used to connect user inputs to
+ temperature_grad_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> my_time_scoping = dpf.Scoping()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> my_mesh_scoping = dpf.ScopingsContainer()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> my_fields_container = dpf.FieldsContainer()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> my_streams_container = dpf.StreamsContainer()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> my_data_sources = dpf.DataSources()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> my_bool_rotate_to_global = bool()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> my_mesh = dpf.MeshedRegion()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> my_requested_location = str()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> my_read_cyclic = int()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> my_read_beams = bool()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(temperature_grad_Z._spec().inputs, op)
+ self._time_scoping = Input(temperature_grad_Z._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._time_scoping)
+ self._mesh_scoping = Input(temperature_grad_Z._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._mesh_scoping)
+ self._fields_container = Input(
+ temperature_grad_Z._spec().input_pin(2), 2, op, -1
+ )
+ self._inputs.append(self._fields_container)
+ self._streams_container = Input(
+ temperature_grad_Z._spec().input_pin(3), 3, op, -1
+ )
+ self._inputs.append(self._streams_container)
+ self._data_sources = Input(temperature_grad_Z._spec().input_pin(4), 4, op, -1)
+ self._inputs.append(self._data_sources)
+ self._bool_rotate_to_global = Input(
+ temperature_grad_Z._spec().input_pin(5), 5, op, -1
+ )
+ self._inputs.append(self._bool_rotate_to_global)
+ self._mesh = Input(temperature_grad_Z._spec().input_pin(7), 7, op, -1)
+ self._inputs.append(self._mesh)
+ self._requested_location = Input(
+ temperature_grad_Z._spec().input_pin(9), 9, op, -1
+ )
+ self._inputs.append(self._requested_location)
+ self._read_cyclic = Input(temperature_grad_Z._spec().input_pin(14), 14, op, -1)
+ self._inputs.append(self._read_cyclic)
+ self._read_beams = Input(temperature_grad_Z._spec().input_pin(22), 22, op, -1)
+ self._inputs.append(self._read_beams)
+
+ @property
+ def time_scoping(self):
+ """Allows to connect time_scoping input to the operator.
+
+ Time/freq values (use doubles or field),
+ time/freq set ids (use ints or
+ scoping) or time/freq step ids (use
+ scoping with timefreq_steps location)
+ required in output. to specify
+ time/freq values at specific load
+ steps, put a field (and not a list)
+ in input with a scoping located on
+ "timefreq_steps". linear time freq
+ intrapolation is performed if the
+ values are not in the result files
+ and the data at the max time or freq
+ is taken when time/freqs are higher
+ than available time/freqs in result
+ files.
+
+ Parameters
+ ----------
+ my_time_scoping : Scoping or int or float or Field
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.time_scoping.connect(my_time_scoping)
+ >>> # or
+ >>> op.inputs.time_scoping(my_time_scoping)
+ """
+ return self._time_scoping
+
+ @property
+ def mesh_scoping(self):
+ """Allows to connect mesh_scoping input to the operator.
+
+ Nodes or elements scoping required in output.
+ the output fields will be scoped on
+ these node or element ids. to figure
+ out the ordering of the fields data,
+ look at their scoping ids as they
+ might not be ordered as the input
+ scoping was. the scoping's location
+ indicates whether nodes or elements
+ are asked for. using scopings
+ container allows you to split the
+ result fields container into domains
+
+ Parameters
+ ----------
+ my_mesh_scoping : ScopingsContainer or Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+ >>> # or
+ >>> op.inputs.mesh_scoping(my_mesh_scoping)
+ """
+ return self._mesh_scoping
+
+ @property
+ def fields_container(self):
+ """Allows to connect fields_container input to the operator.
+
+ Fieldscontainer already allocated modified
+ inplace
+
+ Parameters
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.fields_container.connect(my_fields_container)
+ >>> # or
+ >>> op.inputs.fields_container(my_fields_container)
+ """
+ return self._fields_container
+
+ @property
+ def streams_container(self):
+ """Allows to connect streams_container input to the operator.
+
+ Result file container allowed to be kept open
+ to cache data
+
+ Parameters
+ ----------
+ my_streams_container : StreamsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.streams_container.connect(my_streams_container)
+ >>> # or
+ >>> op.inputs.streams_container(my_streams_container)
+ """
+ return self._streams_container
+
+ @property
+ def data_sources(self):
+ """Allows to connect data_sources input to the operator.
+
+ Result file path container, used if no
+ streams are set
+
+ Parameters
+ ----------
+ my_data_sources : DataSources
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.data_sources.connect(my_data_sources)
+ >>> # or
+ >>> op.inputs.data_sources(my_data_sources)
+ """
+ return self._data_sources
+
+ @property
+ def bool_rotate_to_global(self):
+ """Allows to connect bool_rotate_to_global input to the operator.
+
+ If true the field is rotated to global
+ coordinate system (default true)
+
+ Parameters
+ ----------
+ my_bool_rotate_to_global : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
+ >>> # or
+ >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
+ """
+ return self._bool_rotate_to_global
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Prevents from reading the mesh in the result
+ files
+
+ Parameters
+ ----------
+ my_mesh : MeshedRegion or MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+ @property
+ def requested_location(self):
+ """Allows to connect requested_location input to the operator.
+
+ Requested location, default is nodal
+
+ Parameters
+ ----------
+ my_requested_location : str
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.requested_location.connect(my_requested_location)
+ >>> # or
+ >>> op.inputs.requested_location(my_requested_location)
+ """
+ return self._requested_location
+
+ @property
+ def read_cyclic(self):
+ """Allows to connect read_cyclic input to the operator.
+
+ If 0 cyclic symmetry is ignored, if 1 cyclic
+ sector is read, if 2 cyclic expansion
+ is done, if 3 cyclic expansion is
+ done and stages are merged (default
+ is 1)
+
+ Parameters
+ ----------
+ my_read_cyclic : int
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.read_cyclic.connect(my_read_cyclic)
+ >>> # or
+ >>> op.inputs.read_cyclic(my_read_cyclic)
+ """
+ return self._read_cyclic
+
+ @property
+ def read_beams(self):
+ """Allows to connect read_beams input to the operator.
+
+ Elemental nodal beam results are read if this
+ pin is set to true (default is false)
+
+ Parameters
+ ----------
+ my_read_beams : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> op.inputs.read_beams.connect(my_read_beams)
+ >>> # or
+ >>> op.inputs.read_beams(my_read_beams)
+ """
+ return self._read_beams
+
+
+class OutputsTemperatureGradZ(_Outputs):
+ """Intermediate class used to get outputs from
+ temperature_grad_Z operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(temperature_grad_Z._spec().outputs, op)
+ self._fields_container = Output(temperature_grad_Z._spec().output_pin(0), 0, op)
+ self._outputs.append(self._fields_container)
+
+ @property
+ def fields_container(self):
+ """Allows to get fields_container output of the operator
+
+ Returns
+ ----------
+ my_fields_container : FieldsContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.result.temperature_grad_Z()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_fields_container = op.outputs.fields_container()
+ """ # noqa: E501
+ return self._fields_container
diff --git a/src/ansys/dpf/core/scopings_container.py b/src/ansys/dpf/core/scopings_container.py
index 64a34a5eb0..c076b9fdc6 100644
--- a/src/ansys/dpf/core/scopings_container.py
+++ b/src/ansys/dpf/core/scopings_container.py
@@ -37,7 +37,7 @@ class ScopingsContainer(CollectionBase[scoping.Scoping]):
Parameters
----------
- scopings_container : ansys.grpc.dpf.collection_pb2.Collection or
+ scopings_container : ansys.grpc.dpf.collection_message_pb2.Collection or
ansys.dpf.core.ScopingsContainer, optional
Create a scopings container from a Collection message or create
a copy from an existing scopings container
diff --git a/src/ansys/dpf/gate/collection_grpcapi.py b/src/ansys/dpf/gate/collection_grpcapi.py
index 3a717c1399..ed94af7e2d 100644
--- a/src/ansys/dpf/gate/collection_grpcapi.py
+++ b/src/ansys/dpf/gate/collection_grpcapi.py
@@ -21,7 +21,7 @@ class CollectionGRPCAPI(collection_abstract_api.CollectionAbstractAPI):
@staticmethod
def init_collection_environment(object):
- from ansys.grpc.dpf import collection_pb2, collection_pb2_grpc
+ from ansys.grpc.dpf import collection_message_pb2, collection_pb2_grpc
if not hasattr(object, "_server"):
server = object
elif isinstance(object._server, weakref.ref):
@@ -32,7 +32,7 @@ def init_collection_environment(object):
CollectionGRPCAPI.STUBNAME, collection_pb2_grpc.CollectionServiceStub)
object._deleter_func = (
- _get_stub(server).Delete, lambda obj: obj._internal_obj if isinstance(obj, collection_pb2.Collection) else None)
+ _get_stub(server).Delete, lambda obj: obj._internal_obj if isinstance(obj, collection_message_pb2.Collection) else None)
@staticmethod
def collection_of_scoping_new_on_client(client):
diff --git a/src/ansys/dpf/gate/data_processing_grpcapi.py b/src/ansys/dpf/gate/data_processing_grpcapi.py
index 28c40fcac7..ae5d0be598 100644
--- a/src/ansys/dpf/gate/data_processing_grpcapi.py
+++ b/src/ansys/dpf/gate/data_processing_grpcapi.py
@@ -210,7 +210,7 @@ def data_processing_get_server_version_on_client(client, major, minor):
@staticmethod
def data_processing_description_string(data):
data_obj = data._internal_obj
- from ansys.grpc.dpf import base_pb2, collection_pb2
+ from ansys.grpc.dpf import base_pb2, collection_message_pb2
request = base_pb2.DescribeRequest()
if isinstance(data_obj.id, int):
request.dpf_type_id = data_obj.id
@@ -224,7 +224,7 @@ def data_processing_description_string(data):
client = serv_to_test.client
else:
return ""
- if isinstance(data_obj, collection_pb2.Collection):
+ if isinstance(data_obj, collection_message_pb2.Collection):
from ansys.dpf.gate import collection_grpcapi
collection_grpcapi.CollectionGRPCAPI.init_collection_environment(data)
response = collection_grpcapi._get_stub(data._server.client).Describe(request)
diff --git a/src/ansys/dpf/gate/generated/any_abstract_api.py b/src/ansys/dpf/gate/generated/any_abstract_api.py
index 752f374dc8..721e9346e4 100644
--- a/src/ansys/dpf/gate/generated/any_abstract_api.py
+++ b/src/ansys/dpf/gate/generated/any_abstract_api.py
@@ -131,6 +131,10 @@ def any_get_as_custom_type_fields_container(any):
def any_get_as_custom_type_field(any):
raise NotImplementedError
+ @staticmethod
+ def any_get_as_support(any):
+ raise NotImplementedError
+
@staticmethod
def any_make_obj_as_any(dpf_object):
raise NotImplementedError
diff --git a/src/ansys/dpf/gate/generated/any_capi.py b/src/ansys/dpf/gate/generated/any_capi.py
index f04345d448..8beed87a4f 100644
--- a/src/ansys/dpf/gate/generated/any_capi.py
+++ b/src/ansys/dpf/gate/generated/any_capi.py
@@ -293,6 +293,15 @@ def any_get_as_custom_type_field(any):
raise errors.DPFServerException(sError.value)
return res
+ @staticmethod
+ def any_get_as_support(any):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.Any_getAs_Support(any._internal_obj if any is not None else None, ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
@staticmethod
def any_make_obj_as_any(dpf_object):
errorSize = ctypes.c_int(0)
diff --git a/src/ansys/dpf/gate/generated/capi.py b/src/ansys/dpf/gate/generated/capi.py
index ba00de0514..1e6967c5cc 100644
--- a/src/ansys/dpf/gate/generated/capi.py
+++ b/src/ansys/dpf/gate/generated/capi.py
@@ -140,6 +140,10 @@ def load_api(path):
dll.Any_getAs_CustomTypeField.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.Any_getAs_CustomTypeField.restype = ctypes.c_void_p
+ if hasattr(dll, "Any_getAs_Support"):
+ dll.Any_getAs_Support.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.Any_getAs_Support.restype = ctypes.c_void_p
+
if hasattr(dll, "Any_makeObj_asAny"):
dll.Any_makeObj_asAny.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.Any_makeObj_asAny.restype = ctypes.c_void_p
@@ -306,6 +310,10 @@ def load_api(path):
dll.Collection_OfStringNew.argtypes = (ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.Collection_OfStringNew.restype = ctypes.c_void_p
+ if hasattr(dll, "Collection_OfCharNew"):
+ dll.Collection_OfCharNew.argtypes = (ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.Collection_OfCharNew.restype = ctypes.c_void_p
+
if hasattr(dll, "Collection_GetDataAsInt"):
dll.Collection_GetDataAsInt.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.Collection_GetDataAsInt.restype = ctypes.POINTER(ctypes.c_int32)
@@ -314,6 +322,10 @@ def load_api(path):
dll.Collection_GetDataAsDouble.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.Collection_GetDataAsDouble.restype = ctypes.POINTER(ctypes.c_double)
+ if hasattr(dll, "Collection_GetDataAsChar"):
+ dll.Collection_GetDataAsChar.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.Collection_GetDataAsChar.restype = ctypes.POINTER(ctypes.c_char)
+
if hasattr(dll, "Collection_AddIntEntry"):
dll.Collection_AddIntEntry.argtypes = (ctypes.c_void_p, ctypes.c_int32, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.Collection_AddIntEntry.restype = None
@@ -386,6 +398,26 @@ def load_api(path):
dll.Collection_OfAnyNew.argtypes = (ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.Collection_OfAnyNew.restype = ctypes.c_void_p
+ if hasattr(dll, "Collection_OfScopingNewWithData"):
+ dll.Collection_OfScopingNewWithData.argtypes = (ctypes.POINTER(ctypes.c_void_p), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_char)), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32)), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.Collection_OfScopingNewWithData.restype = ctypes.c_void_p
+
+ if hasattr(dll, "Collection_OfFieldNewWithData"):
+ dll.Collection_OfFieldNewWithData.argtypes = (ctypes.POINTER(ctypes.c_void_p), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_char)), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32)), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.Collection_OfFieldNewWithData.restype = ctypes.c_void_p
+
+ if hasattr(dll, "Collection_OfMeshNewWithData"):
+ dll.Collection_OfMeshNewWithData.argtypes = (ctypes.POINTER(ctypes.c_void_p), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_char)), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32)), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.Collection_OfMeshNewWithData.restype = ctypes.c_void_p
+
+ if hasattr(dll, "Collection_OfCustomTypeFieldNewWithData"):
+ dll.Collection_OfCustomTypeFieldNewWithData.argtypes = (ctypes.POINTER(ctypes.c_void_p), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_char)), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32)), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.Collection_OfCustomTypeFieldNewWithData.restype = ctypes.c_void_p
+
+ if hasattr(dll, "Collection_OfAnyNewWithData"):
+ dll.Collection_OfAnyNewWithData.argtypes = (ctypes.POINTER(ctypes.c_void_p), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_char)), ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32)), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.Collection_OfAnyNewWithData.restype = ctypes.c_void_p
+
if hasattr(dll, "Collection_GetNumLabels"):
dll.Collection_GetNumLabels.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.Collection_GetNumLabels.restype = ctypes.c_int32
@@ -743,6 +775,10 @@ def load_api(path):
dll.DataProcessing_deserialize.argtypes = (ctypes.POINTER(ctypes.c_char), ctypes.c_size_t, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.DataProcessing_deserialize.restype = ctypes.c_void_p
+ if hasattr(dll, "DataProcessing_deserializeMany"):
+ dll.DataProcessing_deserializeMany.argtypes = (ctypes.POINTER(ctypes.c_char), ctypes.c_size_t, ctypes.POINTER(ctypes.c_size_t), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.DataProcessing_deserializeMany.restype = ctypes.POINTER(ctypes.c_void_p)
+
if hasattr(dll, "DataProcessing_getGlobalConfigAsDataTree"):
dll.DataProcessing_getGlobalConfigAsDataTree.argtypes = (ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.DataProcessing_getGlobalConfigAsDataTree.restype = ctypes.c_void_p
@@ -891,6 +927,10 @@ def load_api(path):
dll.DataProcessing_create_param_tree_on_client.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.DataProcessing_create_param_tree_on_client.restype = ctypes.c_void_p
+ if hasattr(dll, "DataProcessing_create_from_on_client"):
+ dll.DataProcessing_create_from_on_client.argtypes = (ctypes.c_void_p, ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.DataProcessing_create_from_on_client.restype = ctypes.c_void_p
+
#-------------------------------------------------------------------------------
# DataProcessingError
#-------------------------------------------------------------------------------
@@ -3935,6 +3975,10 @@ def load_api(path):
dll.CSStringField_SetCScoping.argtypes = (ctypes.c_void_p, ctypes.c_void_p, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.CSStringField_SetCScoping.restype = None
+ if hasattr(dll, "CSStringField_SetDataPointer"):
+ dll.CSStringField_SetDataPointer.argtypes = (ctypes.c_void_p, ctypes.c_int32, ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
+ dll.CSStringField_SetDataPointer.restype = None
+
if hasattr(dll, "CSStringField_PushBack"):
dll.CSStringField_PushBack.argtypes = (ctypes.c_void_p, ctypes.c_int32, ctypes.c_int32, ctypes.POINTER(ctypes.POINTER(ctypes.c_char)), ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_wchar_p), )
dll.CSStringField_PushBack.restype = None
diff --git a/src/ansys/dpf/gate/generated/collection_abstract_api.py b/src/ansys/dpf/gate/generated/collection_abstract_api.py
index d3b10a6c89..2d0804ab00 100644
--- a/src/ansys/dpf/gate/generated/collection_abstract_api.py
+++ b/src/ansys/dpf/gate/generated/collection_abstract_api.py
@@ -23,6 +23,10 @@ def collection_of_double_new():
def collection_of_string_new():
raise NotImplementedError
+ @staticmethod
+ def collection_of_char_new():
+ raise NotImplementedError
+
@staticmethod
def collection_get_data_as_int(collection, size):
raise NotImplementedError
@@ -31,6 +35,10 @@ def collection_get_data_as_int(collection, size):
def collection_get_data_as_double(collection, size):
raise NotImplementedError
+ @staticmethod
+ def collection_get_data_as_char(collection, size):
+ raise NotImplementedError
+
@staticmethod
def collection_add_int_entry(collection, obj):
raise NotImplementedError
@@ -103,6 +111,26 @@ def collection_of_custom_type_field_new():
def collection_of_any_new():
raise NotImplementedError
+ @staticmethod
+ def collection_of_scoping_new_with_data(data, num_ids, labels, num_labels, ids):
+ raise NotImplementedError
+
+ @staticmethod
+ def collection_of_field_new_with_data(data, num_ids, labels, num_labels, ids):
+ raise NotImplementedError
+
+ @staticmethod
+ def collection_of_mesh_new_with_data(data, num_ids, labels, num_labels, ids):
+ raise NotImplementedError
+
+ @staticmethod
+ def collection_of_custom_type_field_new_with_data(data, num_ids, labels, num_labels, ids):
+ raise NotImplementedError
+
+ @staticmethod
+ def collection_of_any_new_with_data(data, num_ids, labels, num_labels, ids):
+ raise NotImplementedError
+
@staticmethod
def collection_get_num_labels(collection):
raise NotImplementedError
diff --git a/src/ansys/dpf/gate/generated/collection_capi.py b/src/ansys/dpf/gate/generated/collection_capi.py
index 7c4823c5a5..55da9adbbc 100644
--- a/src/ansys/dpf/gate/generated/collection_capi.py
+++ b/src/ansys/dpf/gate/generated/collection_capi.py
@@ -44,6 +44,15 @@ def collection_of_string_new():
raise errors.DPFServerException(sError.value)
return res
+ @staticmethod
+ def collection_of_char_new():
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.Collection_OfCharNew(ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
@staticmethod
def collection_get_data_as_int(collection, size):
errorSize = ctypes.c_int(0)
@@ -62,6 +71,17 @@ def collection_get_data_as_double(collection, size):
raise errors.DPFServerException(sError.value)
return res
+ @staticmethod
+ def collection_get_data_as_char(collection, size):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.Collection_GetDataAsChar(collection._internal_obj if collection is not None else None, ctypes.byref(utils.to_int32(size)), ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ newres = ctypes.cast(res, ctypes.c_char_p).value.decode("utf-8") if res else None
+ capi.dll.DataProcessing_String_post_event(res, ctypes.byref(errorSize), ctypes.byref(sError))
+ return newres
+
@staticmethod
def collection_add_int_entry(collection, obj):
errorSize = ctypes.c_int(0)
@@ -226,6 +246,51 @@ def collection_of_any_new():
raise errors.DPFServerException(sError.value)
return res
+ @staticmethod
+ def collection_of_scoping_new_with_data(data, num_ids, labels, num_labels, ids):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.Collection_OfScopingNewWithData(data, utils.to_int32(num_ids), utils.to_char_ptr_ptr(labels), utils.to_int32(num_labels), utils.to_int32_ptr_ptr(ids), ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
+ @staticmethod
+ def collection_of_field_new_with_data(data, num_ids, labels, num_labels, ids):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.Collection_OfFieldNewWithData(data, utils.to_int32(num_ids), utils.to_char_ptr_ptr(labels), utils.to_int32(num_labels), utils.to_int32_ptr_ptr(ids), ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
+ @staticmethod
+ def collection_of_mesh_new_with_data(data, num_ids, labels, num_labels, ids):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.Collection_OfMeshNewWithData(data, utils.to_int32(num_ids), utils.to_char_ptr_ptr(labels), utils.to_int32(num_labels), utils.to_int32_ptr_ptr(ids), ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
+ @staticmethod
+ def collection_of_custom_type_field_new_with_data(data, num_ids, labels, num_labels, ids):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.Collection_OfCustomTypeFieldNewWithData(data, utils.to_int32(num_ids), utils.to_char_ptr_ptr(labels), utils.to_int32(num_labels), utils.to_int32_ptr_ptr(ids), ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
+ @staticmethod
+ def collection_of_any_new_with_data(data, num_ids, labels, num_labels, ids):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.Collection_OfAnyNewWithData(data, utils.to_int32(num_ids), utils.to_char_ptr_ptr(labels), utils.to_int32(num_labels), utils.to_int32_ptr_ptr(ids), ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
@staticmethod
def collection_get_num_labels(collection):
errorSize = ctypes.c_int(0)
diff --git a/src/ansys/dpf/gate/generated/data_processing_abstract_api.py b/src/ansys/dpf/gate/generated/data_processing_abstract_api.py
index 03c20b603f..0dfeac5af9 100644
--- a/src/ansys/dpf/gate/generated/data_processing_abstract_api.py
+++ b/src/ansys/dpf/gate/generated/data_processing_abstract_api.py
@@ -115,6 +115,10 @@ def data_processing_serialize(obj):
def data_processing_deserialize(data, dataSize):
raise NotImplementedError
+ @staticmethod
+ def data_processing_deserialize_many(str, strSize, size):
+ raise NotImplementedError
+
@staticmethod
def data_processing_get_global_config_as_data_tree():
raise NotImplementedError
@@ -263,3 +267,7 @@ def data_processing_process_id_on_client(client):
def data_processing_create_param_tree_on_client(client):
raise NotImplementedError
+ @staticmethod
+ def data_processing_create_from_on_client(client, base):
+ raise NotImplementedError
+
diff --git a/src/ansys/dpf/gate/generated/data_processing_capi.py b/src/ansys/dpf/gate/generated/data_processing_capi.py
index 468f6a982f..91ba055d7b 100644
--- a/src/ansys/dpf/gate/generated/data_processing_capi.py
+++ b/src/ansys/dpf/gate/generated/data_processing_capi.py
@@ -238,6 +238,15 @@ def data_processing_deserialize(data, dataSize):
raise errors.DPFServerException(sError.value)
return res
+ @staticmethod
+ def data_processing_deserialize_many(str, strSize, size):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.DataProcessing_deserializeMany(utils.to_char_ptr(str), strSize, size, ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
@staticmethod
def data_processing_get_global_config_as_data_tree():
errorSize = ctypes.c_int(0)
@@ -569,3 +578,12 @@ def data_processing_create_param_tree_on_client(client):
raise errors.DPFServerException(sError.value)
return res
+ @staticmethod
+ def data_processing_create_from_on_client(client, base):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.DataProcessing_create_from_on_client(client._internal_obj if client is not None else None, base._internal_obj if base is not None else None, ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
diff --git a/src/ansys/dpf/gate/generated/string_field_abstract_api.py b/src/ansys/dpf/gate/generated/string_field_abstract_api.py
index 93ad33970a..09af2ded76 100644
--- a/src/ansys/dpf/gate/generated/string_field_abstract_api.py
+++ b/src/ansys/dpf/gate/generated/string_field_abstract_api.py
@@ -87,6 +87,10 @@ def csstring_field_set_data_with_size(field, size, data, sizes):
def csstring_field_set_cscoping(field, scoping):
raise NotImplementedError
+ @staticmethod
+ def csstring_field_set_data_pointer(field, size, data):
+ raise NotImplementedError
+
@staticmethod
def csstring_field_push_back(field, EntityId, size, data):
raise NotImplementedError
diff --git a/src/ansys/dpf/gate/generated/string_field_capi.py b/src/ansys/dpf/gate/generated/string_field_capi.py
index 6c0b97ee1f..6dca80bf71 100644
--- a/src/ansys/dpf/gate/generated/string_field_capi.py
+++ b/src/ansys/dpf/gate/generated/string_field_capi.py
@@ -190,6 +190,15 @@ def csstring_field_set_cscoping(field, scoping):
raise errors.DPFServerException(sError.value)
return res
+ @staticmethod
+ def csstring_field_set_data_pointer(field, size, data):
+ errorSize = ctypes.c_int(0)
+ sError = ctypes.c_wchar_p()
+ res = capi.dll.CSStringField_SetDataPointer(field._internal_obj if field is not None else None, utils.to_int32(size), utils.to_int32_ptr(data), ctypes.byref(utils.to_int32(errorSize)), ctypes.byref(sError))
+ if errorSize.value != 0:
+ raise errors.DPFServerException(sError.value)
+ return res
+
@staticmethod
def csstring_field_push_back(field, EntityId, size, data):
errorSize = ctypes.c_int(0)
diff --git a/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll b/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll
index d4ec333117..9611c638da 100644
Binary files a/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll and b/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll differ
diff --git a/src/ansys/dpf/gatebin/DPFClientAPI.dll b/src/ansys/dpf/gatebin/DPFClientAPI.dll
index f10bba2bc7..54987ae7ef 100644
Binary files a/src/ansys/dpf/gatebin/DPFClientAPI.dll and b/src/ansys/dpf/gatebin/DPFClientAPI.dll differ
diff --git a/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so b/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so
index 16a31ed831..9462a8ae25 100644
Binary files a/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so and b/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so differ
diff --git a/src/ansys/dpf/gatebin/libDPFClientAPI.so b/src/ansys/dpf/gatebin/libDPFClientAPI.so
index 6637378884..6cb303743e 100644
Binary files a/src/ansys/dpf/gatebin/libDPFClientAPI.so and b/src/ansys/dpf/gatebin/libDPFClientAPI.so differ
diff --git a/src/ansys/grpc/dpf/base_pb2.py b/src/ansys/grpc/dpf/base_pb2.py
index 602a5d2e18..92e59e094c 100644
--- a/src/ansys/grpc/dpf/base_pb2.py
+++ b/src/ansys/grpc/dpf/base_pb2.py
@@ -14,7 +14,7 @@
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nbase.proto\x12\x15\x61nsys.api.dpf.base.v0\"\x07\n\x05\x45mpty\"6\n\x10\x45ntityIdentifier\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x16\n\x0eserver_address\x18\x02 \x01(\t\"\"\n\x0c\x44oubleVector\x12\x12\n\nrep_double\x18\x01 \x03(\x01\" \n\x0b\x46loatVector\x12\x11\n\trep_float\x18\x01 \x03(\x02\"\x1c\n\tIntVector\x12\x0f\n\x07rep_int\x18\x01 \x03(\x05\"\x1f\n\nByteVector\x12\x11\n\trep_bytes\x18\x01 \x01(\x0c\"\x1a\n\x08PBString\x12\x0e\n\x06string\x18\x01 \x01(\t\"\"\n\x0cStringVector\x12\x12\n\nrep_string\x18\x01 \x03(\t\"4\n\x03Ids\x12-\n\x03ids\x18\x01 \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVector\"\x1c\n\x08Location\x12\x10\n\x08location\x18\x01 \x01(\t\"\x1e\n\rCountResponse\x12\r\n\x05\x63ount\x18\x01 \x01(\x05\"\x16\n\x05\x41rray\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\"V\n\rPluginRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x64llPath\x18\x02 \x01(\t\x12\x0e\n\x06symbol\x18\x03 \x01(\t\x12\x16\n\x0egenerate_files\x18\x04 \x01(\x08\"\x13\n\x11ServerInfoRequest\"\xef\x01\n\x12ServerInfoResponse\x12\x14\n\x0cmajorVersion\x18\x01 \x01(\x05\x12\x14\n\x0cminorVersion\x18\x02 \x01(\x05\x12\x11\n\tprocessId\x18\x03 \x01(\x04\x12\n\n\x02ip\x18\x04 \x01(\t\x12\x0c\n\x04port\x18\x05 \x01(\x05\x12M\n\nproperties\x18\x06 \x03(\x0b\x32\x39.ansys.api.dpf.base.v0.ServerInfoResponse.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"&\n\x0f\x44\x65scribeRequest\x12\x13\n\x0b\x64pf_type_id\x18\x01 \x01(\x05\"M\n\rDeleteRequest\x12<\n\x0b\x64pf_type_id\x18\x01 \x03(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"S\n\x13\x44uplicateRefRequest\x12<\n\x0b\x64pf_type_id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"\'\n\x10\x44\x65scribeResponse\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\"D\n\x15\x44\x65scribeArrayResponse\x12+\n\x05\x61rray\x18\x01 
\x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"X\n\x14\x44uplicateRefResponse\x12@\n\x0fnew_dpf_type_id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"2\n\x08\x46ileData\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x18\n\x10server_file_path\x18\x02 \x01(\t\"/\n\x13\x44ownloadFileRequest\x12\x18\n\x10server_file_path\x18\x01 \x01(\t\"E\n\x14\x44ownloadFileResponse\x12-\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.FileData\"r\n\x11UploadFileRequest\x12\x18\n\x10server_file_path\x18\x01 \x01(\t\x12\x14\n\x0cuse_temp_dir\x18\x02 \x01(\x08\x12-\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.FileData\".\n\x12UploadFileResponse\x12\x18\n\x10server_file_path\x18\x01 \x01(\t\"G\n\x10SerializeRequest\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"\"\n\x11SerializeResponse\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\"c\n\x0e\x43onfigResponse\x12Q\n runtime_core_config_data_tree_id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\">\n\x05\x45rror\x12\n\n\x02ok\x18\x01 \x01(\x08\x12\x12\n\nerror_code\x18\x02 \x01(\x05\x12\x15\n\rerror_message\x18\x03 \x01(\t\"\xac\x01\n\x15InitializationRequest\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x05\x12\x0b\n\x03xml\x18\x02 \x01(\t\x12\x14\n\x0c\x66orce_reinit\x18\x03 \x01(\x08\x12\x18\n\x0b\x61pi_version\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\x0flicense_context\x18\x05 \x01(\x05\x12\x1c\n\x14\x65rror_on_plugin_load\x18\x06 \x01(\x05\x42\x0e\n\x0c_api_version\"E\n\x16InitializationResponse\x12+\n\x05\x65rror\x18\x01 
\x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Error*U\n\x0b\x43ountEntity\x12\x11\n\rNUM_COMPONENT\x10\x00\x12\x17\n\x13NUM_ELEMENTARY_DATA\x10\x01\x12\x0c\n\x08NUM_SETS\x10\x02\x12\x0c\n\x08NUM_DATA\x10\x03*\"\n\x07\x43omplex\x12\x08\n\x04REAL\x10\x00\x12\r\n\tIMAGINARY\x10\x01*;\n\x06Nature\x12\n\n\x06SCALAR\x10\x00\x12\n\n\x06VECTOR\x10\x01\x12\n\n\x06MATRIX\x10\x02\x12\r\n\tSYMMATRIX\x10\x05*\x8a\x03\n\x04Type\x12\n\n\x06STRING\x10\x00\x12\x07\n\x03INT\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\x08\n\x04\x42OOL\x10\x03\x12\t\n\x05\x46IELD\x10\x04\x12\x0e\n\nCOLLECTION\x10\x05\x12\x0b\n\x07SCOPING\x10\x06\x12\x10\n\x0c\x44\x41TA_SOURCES\x10\x07\x12\x11\n\rMESHED_REGION\x10\x08\x12\x15\n\x11TIME_FREQ_SUPPORT\x10\t\x12\x0f\n\x0bRESULT_INFO\x10\n\x12\x12\n\x0e\x43YCLIC_SUPPORT\x10\x0b\x12\x12\n\x0ePROPERTY_FIELD\x10\x0c\x12\x0c\n\x08WORKFLOW\x10\r\x12\x07\n\x03RUN\x10\x0e\x12\x07\n\x03\x41NY\x10\x0f\x12\x0b\n\x07VEC_INT\x10\x10\x12\x0e\n\nVEC_DOUBLE\x10\x11\x12\x0b\n\x07SUPPORT\x10\x12\x12\x0c\n\x08OPERATOR\x10\x13\x12\r\n\tDATA_TREE\x10\x14\x12\x0e\n\nVEC_STRING\x10\x15\x12\x10\n\x0cSTRING_FIELD\x10\x16\x12\x15\n\x11\x43USTOM_TYPE_FIELD\x10\x17\x12\x1a\n\x16GENERIC_DATA_CONTAINER\x10\x18\x32\xa3\n\n\x0b\x42\x61seService\x12i\n\nInitialize\x12,.ansys.api.dpf.base.v0.InitializationRequest\x1a-.ansys.api.dpf.base.v0.InitializationResponse\x12\x64\n\rGetServerInfo\x12(.ansys.api.dpf.base.v0.ServerInfoRequest\x1a).ansys.api.dpf.base.v0.ServerInfoResponse\x12P\n\tGetConfig\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a%.ansys.api.dpf.base.v0.ConfigResponse\x12J\n\x04Load\x12$.ansys.api.dpf.base.v0.PluginRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12[\n\x08\x44\x65scribe\x12&.ansys.api.dpf.base.v0.DescribeRequest\x1a\'.ansys.api.dpf.base.v0.DescribeResponse\x12j\n\x10\x44\x65scribeStreamed\x12&.ansys.api.dpf.base.v0.DescribeRequest\x1a,.ansys.api.dpf.base.v0.DescribeArrayResponse0\x01\x12L\n\x06\x44\x65lete\x12$.ansys.api.dpf.base.v0.DeleteRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x1
2`\n\tSerialize\x12\'.ansys.api.dpf.base.v0.SerializeRequest\x1a(.ansys.api.dpf.base.v0.SerializeResponse0\x01\x12g\n\x0c\x44uplicateRef\x12*.ansys.api.dpf.base.v0.DuplicateRefRequest\x1a+.ansys.api.dpf.base.v0.DuplicateRefResponse\x12W\n\x0c\x43reateTmpDir\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a).ansys.api.dpf.base.v0.UploadFileResponse\x12i\n\x0c\x44ownloadFile\x12*.ansys.api.dpf.base.v0.DownloadFileRequest\x1a+.ansys.api.dpf.base.v0.DownloadFileResponse0\x01\x12\x63\n\nUploadFile\x12(.ansys.api.dpf.base.v0.UploadFileRequest\x1a).ansys.api.dpf.base.v0.UploadFileResponse(\x01\x12M\n\x0fPrepareShutdown\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12K\n\rReleaseServer\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x18\xaa\x02\x15\x41nsys.Api.Dpf.Base.V0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nbase.proto\x12\x15\x61nsys.api.dpf.base.v0\"\x07\n\x05\x45mpty\"6\n\x10\x45ntityIdentifier\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x16\n\x0eserver_address\x18\x02 \x01(\t\"\"\n\x0c\x44oubleVector\x12\x12\n\nrep_double\x18\x01 \x03(\x01\" \n\x0b\x46loatVector\x12\x11\n\trep_float\x18\x01 \x03(\x02\"\x1c\n\tIntVector\x12\x0f\n\x07rep_int\x18\x01 \x03(\x05\"\x1f\n\nByteVector\x12\x11\n\trep_bytes\x18\x01 \x01(\x0c\"\x1a\n\x08PBString\x12\x0e\n\x06string\x18\x01 \x01(\t\"\"\n\x0cStringVector\x12\x12\n\nrep_string\x18\x01 \x03(\t\"4\n\x03Ids\x12-\n\x03ids\x18\x01 \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVector\"\x1c\n\x08Location\x12\x10\n\x08location\x18\x01 \x01(\t\"\x1e\n\rCountResponse\x12\r\n\x05\x63ount\x18\x01 \x01(\x05\"\x16\n\x05\x41rray\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\"V\n\rPluginRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x64llPath\x18\x02 \x01(\t\x12\x0e\n\x06symbol\x18\x03 \x01(\t\x12\x16\n\x0egenerate_files\x18\x04 \x01(\x08\"\x13\n\x11ServerInfoRequest\"\xef\x01\n\x12ServerInfoResponse\x12\x14\n\x0cmajorVersion\x18\x01 \x01(\x05\x12\x14\n\x0cminorVersion\x18\x02 \x01(\x05\x12\x11\n\tprocessId\x18\x03 \x01(\x04\x12\n\n\x02ip\x18\x04 \x01(\t\x12\x0c\n\x04port\x18\x05 \x01(\x05\x12M\n\nproperties\x18\x06 \x03(\x0b\x32\x39.ansys.api.dpf.base.v0.ServerInfoResponse.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"&\n\x0f\x44\x65scribeRequest\x12\x13\n\x0b\x64pf_type_id\x18\x01 \x01(\x05\"M\n\rDeleteRequest\x12<\n\x0b\x64pf_type_id\x18\x01 \x03(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"S\n\x13\x44uplicateRefRequest\x12<\n\x0b\x64pf_type_id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"\'\n\x10\x44\x65scribeResponse\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\"D\n\x15\x44\x65scribeArrayResponse\x12+\n\x05\x61rray\x18\x01 
\x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"X\n\x14\x44uplicateRefResponse\x12@\n\x0fnew_dpf_type_id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"2\n\x08\x46ileData\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x18\n\x10server_file_path\x18\x02 \x01(\t\"/\n\x13\x44ownloadFileRequest\x12\x18\n\x10server_file_path\x18\x01 \x01(\t\"E\n\x14\x44ownloadFileResponse\x12-\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.FileData\"r\n\x11UploadFileRequest\x12\x18\n\x10server_file_path\x18\x01 \x01(\t\x12\x14\n\x0cuse_temp_dir\x18\x02 \x01(\x08\x12-\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.FileData\".\n\x12UploadFileResponse\x12\x18\n\x10server_file_path\x18\x01 \x01(\t\"G\n\x10SerializeRequest\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"\"\n\x11SerializeResponse\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\"c\n\x0e\x43onfigResponse\x12Q\n runtime_core_config_data_tree_id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\">\n\x05\x45rror\x12\n\n\x02ok\x18\x01 \x01(\x08\x12\x12\n\nerror_code\x18\x02 \x01(\x05\x12\x15\n\rerror_message\x18\x03 \x01(\t\"\xac\x01\n\x15InitializationRequest\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x05\x12\x0b\n\x03xml\x18\x02 \x01(\t\x12\x14\n\x0c\x66orce_reinit\x18\x03 \x01(\x08\x12\x18\n\x0b\x61pi_version\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\x0flicense_context\x18\x05 \x01(\x05\x12\x1c\n\x14\x65rror_on_plugin_load\x18\x06 \x01(\x05\x42\x0e\n\x0c_api_version\"E\n\x16InitializationResponse\x12+\n\x05\x65rror\x18\x01 
\x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Error*U\n\x0b\x43ountEntity\x12\x11\n\rNUM_COMPONENT\x10\x00\x12\x17\n\x13NUM_ELEMENTARY_DATA\x10\x01\x12\x0c\n\x08NUM_SETS\x10\x02\x12\x0c\n\x08NUM_DATA\x10\x03*\"\n\x07\x43omplex\x12\x08\n\x04REAL\x10\x00\x12\r\n\tIMAGINARY\x10\x01*;\n\x06Nature\x12\n\n\x06SCALAR\x10\x00\x12\n\n\x06VECTOR\x10\x01\x12\n\n\x06MATRIX\x10\x02\x12\r\n\tSYMMATRIX\x10\x05*\x94\x03\n\x04Type\x12\n\n\x06STRING\x10\x00\x12\x07\n\x03INT\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\x08\n\x04\x42OOL\x10\x03\x12\x08\n\x04\x43HAR\x10\x19\x12\t\n\x05\x46IELD\x10\x04\x12\x0e\n\nCOLLECTION\x10\x05\x12\x0b\n\x07SCOPING\x10\x06\x12\x10\n\x0c\x44\x41TA_SOURCES\x10\x07\x12\x11\n\rMESHED_REGION\x10\x08\x12\x15\n\x11TIME_FREQ_SUPPORT\x10\t\x12\x0f\n\x0bRESULT_INFO\x10\n\x12\x12\n\x0e\x43YCLIC_SUPPORT\x10\x0b\x12\x12\n\x0ePROPERTY_FIELD\x10\x0c\x12\x0c\n\x08WORKFLOW\x10\r\x12\x07\n\x03RUN\x10\x0e\x12\x07\n\x03\x41NY\x10\x0f\x12\x0b\n\x07VEC_INT\x10\x10\x12\x0e\n\nVEC_DOUBLE\x10\x11\x12\x0b\n\x07SUPPORT\x10\x12\x12\x0c\n\x08OPERATOR\x10\x13\x12\r\n\tDATA_TREE\x10\x14\x12\x0e\n\nVEC_STRING\x10\x15\x12\x10\n\x0cSTRING_FIELD\x10\x16\x12\x15\n\x11\x43USTOM_TYPE_FIELD\x10\x17\x12\x1a\n\x16GENERIC_DATA_CONTAINER\x10\x18\x32\xfb\n\n\x0b\x42\x61seService\x12i\n\nInitialize\x12,.ansys.api.dpf.base.v0.InitializationRequest\x1a-.ansys.api.dpf.base.v0.InitializationResponse\x12\x64\n\rGetServerInfo\x12(.ansys.api.dpf.base.v0.ServerInfoRequest\x1a).ansys.api.dpf.base.v0.ServerInfoResponse\x12P\n\tGetConfig\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a%.ansys.api.dpf.base.v0.ConfigResponse\x12J\n\x04Load\x12$.ansys.api.dpf.base.v0.PluginRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12[\n\x08\x44\x65scribe\x12&.ansys.api.dpf.base.v0.DescribeRequest\x1a\'.ansys.api.dpf.base.v0.DescribeResponse\x12j\n\x10\x44\x65scribeStreamed\x12&.ansys.api.dpf.base.v0.DescribeRequest\x1a,.ansys.api.dpf.base.v0.DescribeArrayResponse0\x01\x12L\n\x06\x44\x65lete\x12$.ansys.api.dpf.base.v0.DeleteRequest\x1a\x1c.a
nsys.api.dpf.base.v0.Empty\x12V\n\x0e\x44\x65leteStreamed\x12$.ansys.api.dpf.base.v0.DeleteRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12`\n\tSerialize\x12\'.ansys.api.dpf.base.v0.SerializeRequest\x1a(.ansys.api.dpf.base.v0.SerializeResponse0\x01\x12g\n\x0c\x44uplicateRef\x12*.ansys.api.dpf.base.v0.DuplicateRefRequest\x1a+.ansys.api.dpf.base.v0.DuplicateRefResponse\x12W\n\x0c\x43reateTmpDir\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a).ansys.api.dpf.base.v0.UploadFileResponse\x12i\n\x0c\x44ownloadFile\x12*.ansys.api.dpf.base.v0.DownloadFileRequest\x1a+.ansys.api.dpf.base.v0.DownloadFileResponse0\x01\x12\x63\n\nUploadFile\x12(.ansys.api.dpf.base.v0.UploadFileRequest\x1a).ansys.api.dpf.base.v0.UploadFileResponse(\x01\x12M\n\x0fPrepareShutdown\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12K\n\rReleaseServer\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x18\xaa\x02\x15\x41nsys.Api.Dpf.Base.V0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -31,7 +31,7 @@
_globals['_NATURE']._serialized_start=2174
_globals['_NATURE']._serialized_end=2233
_globals['_TYPE']._serialized_start=2236
- _globals['_TYPE']._serialized_end=2630
+ _globals['_TYPE']._serialized_end=2640
_globals['_EMPTY']._serialized_start=37
_globals['_EMPTY']._serialized_end=44
_globals['_ENTITYIDENTIFIER']._serialized_start=46
@@ -98,6 +98,6 @@
_globals['_INITIALIZATIONREQUEST']._serialized_end=1978
_globals['_INITIALIZATIONRESPONSE']._serialized_start=1980
_globals['_INITIALIZATIONRESPONSE']._serialized_end=2049
- _globals['_BASESERVICE']._serialized_start=2633
- _globals['_BASESERVICE']._serialized_end=3948
+ _globals['_BASESERVICE']._serialized_start=2643
+ _globals['_BASESERVICE']._serialized_end=4046
# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/base_pb2_grpc.py b/src/ansys/grpc/dpf/base_pb2_grpc.py
index af83f1eeed..18316bae10 100644
--- a/src/ansys/grpc/dpf/base_pb2_grpc.py
+++ b/src/ansys/grpc/dpf/base_pb2_grpc.py
@@ -49,6 +49,11 @@ def __init__(self, channel):
request_serializer=base__pb2.DeleteRequest.SerializeToString,
response_deserializer=base__pb2.Empty.FromString,
)
+ self.DeleteStreamed = channel.stream_unary(
+ '/ansys.api.dpf.base.v0.BaseService/DeleteStreamed',
+ request_serializer=base__pb2.DeleteRequest.SerializeToString,
+ response_deserializer=base__pb2.Empty.FromString,
+ )
self.Serialize = channel.unary_stream(
'/ansys.api.dpf.base.v0.BaseService/Serialize',
request_serializer=base__pb2.SerializeRequest.SerializeToString,
@@ -134,6 +139,12 @@ def Delete(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
+ def DeleteStreamed(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
def Serialize(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -221,6 +232,11 @@ def add_BaseServiceServicer_to_server(servicer, server):
request_deserializer=base__pb2.DeleteRequest.FromString,
response_serializer=base__pb2.Empty.SerializeToString,
),
+ 'DeleteStreamed': grpc.stream_unary_rpc_method_handler(
+ servicer.DeleteStreamed,
+ request_deserializer=base__pb2.DeleteRequest.FromString,
+ response_serializer=base__pb2.Empty.SerializeToString,
+ ),
'Serialize': grpc.unary_stream_rpc_method_handler(
servicer.Serialize,
request_deserializer=base__pb2.SerializeRequest.FromString,
@@ -385,6 +401,23 @@ def Delete(request,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+ @staticmethod
+ def DeleteStreamed(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_unary(request_iterator, target, '/ansys.api.dpf.base.v0.BaseService/DeleteStreamed',
+ base__pb2.DeleteRequest.SerializeToString,
+ base__pb2.Empty.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
@staticmethod
def Serialize(request,
target,
diff --git a/src/ansys/grpc/dpf/collection_message_pb2.py b/src/ansys/grpc/dpf/collection_message_pb2.py
new file mode 100644
index 0000000000..ae21a9d84d
--- /dev/null
+++ b/src/ansys/grpc/dpf/collection_message_pb2.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: collection_message.proto
+# Protobuf Python Version: 4.25.1
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+import ansys.grpc.dpf.base_pb2 as base__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18\x63ollection_message.proto\x12\x1b\x61nsys.api.dpf.collection.v0\x1a\nbase.proto\"l\n\nCollection\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.TypeB\x1e\xaa\x02\x1b\x41nsys.Api.Dpf.Collection.v0b\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'collection_message_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\252\002\033Ansys.Api.Dpf.Collection.v0'
+ _globals['_COLLECTION']._serialized_start=69
+ _globals['_COLLECTION']._serialized_end=177
+# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/collection_message_pb2_grpc.py b/src/ansys/grpc/dpf/collection_message_pb2_grpc.py
new file mode 100644
index 0000000000..2daafffebf
--- /dev/null
+++ b/src/ansys/grpc/dpf/collection_message_pb2_grpc.py
@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
diff --git a/src/ansys/grpc/dpf/collection_pb2.py b/src/ansys/grpc/dpf/collection_pb2.py
index 5c7a939baa..265b9c2205 100644
--- a/src/ansys/grpc/dpf/collection_pb2.py
+++ b/src/ansys/grpc/dpf/collection_pb2.py
@@ -18,9 +18,11 @@
import ansys.grpc.dpf.time_freq_support_pb2 as time__freq__support__pb2
import ansys.grpc.dpf.scoping_pb2 as scoping__pb2
import ansys.grpc.dpf.label_space_pb2 as label__space__pb2
+import ansys.grpc.dpf.dpf_any_message_pb2 as dpf__any__message__pb2
+import ansys.grpc.dpf.collection_message_pb2 as collection__message__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x63ollection.proto\x12\x1b\x61nsys.api.dpf.collection.v0\x1a\x19google/protobuf/any.proto\x1a\nbase.proto\x1a\rsupport.proto\x1a\x17time_freq_support.proto\x1a\rscoping.proto\x1a\x11label_space.proto\"l\n\nCollection\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\">\n\x11\x43ollectionRequest\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"%\n\x0c\x44\x65\x66\x61ultValue\x12\x15\n\rdefault_value\x18\x01 \x01(\x05\"[\n\x08NewLabel\x12\r\n\x05label\x18\x01 \x01(\t\x12@\n\rdefault_value\x18\x02 \x01(\x0b\x32).ansys.api.dpf.collection.v0.DefaultValue\"\xa2\x01\n\x13UpdateLabelsRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x35\n\x06labels\x18\x02 \x03(\x0b\x32%.ansys.api.dpf.collection.v0.NewLabel\x12\x17\n\x0foverride_others\x18\x03 \x01(\x08\"\x87\x02\n\rUpdateRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x31\n\x05\x65ntry\x18\x02 \x01(\x0b\x32\".ansys.api.dpf.collection.v0.Entry\x12?\n\x0blabel_space\x18\x03 \x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceH\x00\x12\x0f\n\x05index\x18\x04 \x01(\x05H\x00\x12\x18\n\x0b\x63\x61n_replace\x18\x05 \x01(\x08H\x01\x88\x01\x01\x42\n\n\x08locationB\x0e\n\x0c_can_replace\"\xa9\x01\n\x0c\x45ntryRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12?\n\x0blabel_space\x18\x03 \x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceH\x00\x12\x0f\n\x05index\x18\x04 \x01(\x05H\x00\x42\n\n\x08location\"\xbb\x01\n\x05\x45ntry\x12(\n\x08\x64pf_type\x18\x01 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x12\n\x08int_type\x18\x02 \x01(\x05H\x00\x12\x15\n\x0b\x64ouble_type\x18\x03 \x01(\x01H\x00\x12\x15\n\x0bstring_type\x18\x04 \x01(\tH\x00\x12=\n\x0blabel_space\x18\x05 
\x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceB\x07\n\x05\x65ntry\"I\n\x12GetEntriesResponse\x12\x33\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\".ansys.api.dpf.collection.v0.Entry\"N\n\x19GetEntriesIndicesResponse\x12\x31\n\x07indices\x18\x01 \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVector\"\x18\n\x06Labels\x12\x0e\n\x06labels\x18\x01 \x03(\t\"h\n\x0cListResponse\x12\x33\n\x06labels\x18\x01 \x01(\x0b\x32#.ansys.api.dpf.collection.v0.Labels\x12\x15\n\rcount_entries\x18\x02 \x01(\x05\x12\x0c\n\x04name\x18\x03 \x01(\t\"a\n\x13LabelScopingRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\r\n\x05label\x18\x02 \x01(\t\"P\n\x14LabelScopingResponse\x12\x38\n\rlabel_scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\"\x87\x01\n\x0eSupportRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12\r\n\x05label\x18\x03 \x01(\t\"\xfa\x01\n\x14UpdateSupportRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\r\n\x05label\x18\x02 \x01(\t\x12P\n\x11time_freq_support\x18\x03 \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12\x34\n\x07support\x18\x04 \x01(\x0b\x32!.ansys.api.dpf.support.v0.SupportH\x00\x42\x0e\n\x0csupport_type\"P\n\x11GetAllDataRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\"b\n\x14UpdateAllDataRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\r\n\x05\x61rray\x18\x02 \x01(\x0c\"\xf6\x01\n\x17UpdateCollectionRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x65\n\x11string_properties\x18\x02 \x03(\x0b\x32J.ansys.api.dpf.collection.v0.UpdateCollectionRequest.StringPropertiesEntry\x1a\x37\n\x15StringPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"o\n\x11UpdateSizeRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x0c\n\x04size\x18\x02 \x01(\x05\x12\x0f\n\x07reserve\x18\x03 \x01(\x08\x32\xcf\x0b\n\x11\x43ollectionService\x12\x61\n\x06\x43reate\x12..ansys.api.dpf.collection.v0.CollectionRequest\x1a\'.ansys.api.dpf.collection.v0.Collection\x12\\\n\x06Update\x12\x34.ansys.api.dpf.collection.v0.UpdateCollectionRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12^\n\x0cUpdateLabels\x12\x30.ansys.api.dpf.collection.v0.UpdateLabelsRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12W\n\x0bUpdateEntry\x12*.ansys.api.dpf.collection.v0.UpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12Z\n\nUpdateSize\x12..ansys.api.dpf.collection.v0.UpdateSizeRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12Z\n\x04List\x12\'.ansys.api.dpf.collection.v0.Collection\x1a).ansys.api.dpf.collection.v0.ListResponse\x12h\n\nGetEntries\x12).ansys.api.dpf.collection.v0.EntryRequest\x1a/.ansys.api.dpf.collection.v0.GetEntriesResponse\x12v\n\x11GetEntriesIndices\x12).ansys.api.dpf.collection.v0.EntryRequest\x1a\x36.ansys.api.dpf.collection.v0.GetEntriesIndicesResponse\x12\\\n\nGetSupport\x12+.ansys.api.dpf.collection.v0.SupportRequest\x1a!.ansys.api.dpf.support.v0.Support\x12v\n\x0fGetLabelScoping\x12\x30.ansys.api.dpf.collection.v0.LabelScopingRequest\x1a\x31.ansys.api.dpf.collection.v0.LabelScopingResponse\x12`\n\rUpdateSupport\x12\x31.ansys.api.dpf.collection.v0.UpdateSupportRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12\\\n\nGetAllData\x12..ansys.api.dpf.collection.v0.GetAllDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Array0\x01\x12\x62\n\rUpdateAllData\x12\x31.ansys.api.dpf.collection.v0.UpdateAllDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12[\n\x08\x44\x65scribe\x12&.ansys.api.dpf.base.v0.DescribeRequest\x1a\'.ansys.api.dpf.base.v0.DescribeResponse\x12O\n\x06\x44\x65lete\x12\'.ansys.api.dpf.collection.v0.Collection\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x1e\xaa\x02\x1b\x41nsys.Api
.Dpf.Collection.v0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x63ollection.proto\x12\x1b\x61nsys.api.dpf.collection.v0\x1a\x19google/protobuf/any.proto\x1a\nbase.proto\x1a\rsupport.proto\x1a\x17time_freq_support.proto\x1a\rscoping.proto\x1a\x11label_space.proto\x1a\x15\x64pf_any_message.proto\x1a\x18\x63ollection_message.proto\">\n\x11\x43ollectionRequest\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"%\n\x0c\x44\x65\x66\x61ultValue\x12\x15\n\rdefault_value\x18\x01 \x01(\x05\"[\n\x08NewLabel\x12\r\n\x05label\x18\x01 \x01(\t\x12@\n\rdefault_value\x18\x02 \x01(\x0b\x32).ansys.api.dpf.collection.v0.DefaultValue\"\xa2\x01\n\x13UpdateLabelsRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x35\n\x06labels\x18\x02 \x03(\x0b\x32%.ansys.api.dpf.collection.v0.NewLabel\x12\x17\n\x0foverride_others\x18\x03 \x01(\x08\"\x87\x02\n\rUpdateRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x31\n\x05\x65ntry\x18\x02 \x01(\x0b\x32\".ansys.api.dpf.collection.v0.Entry\x12?\n\x0blabel_space\x18\x03 \x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceH\x00\x12\x0f\n\x05index\x18\x04 \x01(\x05H\x00\x12\x18\n\x0b\x63\x61n_replace\x18\x05 \x01(\x08H\x01\x88\x01\x01\x42\n\n\x08locationB\x0e\n\x0c_can_replace\"\xa9\x01\n\x0c\x45ntryRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12?\n\x0blabel_space\x18\x03 \x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceH\x00\x12\x0f\n\x05index\x18\x04 \x01(\x05H\x00\x42\n\n\x08location\"\xbb\x01\n\x05\x45ntry\x12(\n\x08\x64pf_type\x18\x01 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x12\n\x08int_type\x18\x02 \x01(\x05H\x00\x12\x15\n\x0b\x64ouble_type\x18\x03 \x01(\x01H\x00\x12\x15\n\x0bstring_type\x18\x04 \x01(\tH\x00\x12=\n\x0blabel_space\x18\x05 \x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceB\x07\n\x05\x65ntry\"I\n\x12GetEntriesResponse\x12\x33\n\x07\x65ntries\x18\x01 
\x03(\x0b\x32\".ansys.api.dpf.collection.v0.Entry\"N\n\x19GetEntriesIndicesResponse\x12\x31\n\x07indices\x18\x01 \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVector\"\x18\n\x06Labels\x12\x0e\n\x06labels\x18\x01 \x03(\t\"h\n\x0cListResponse\x12\x33\n\x06labels\x18\x01 \x01(\x0b\x32#.ansys.api.dpf.collection.v0.Labels\x12\x15\n\rcount_entries\x18\x02 \x01(\x05\x12\x0c\n\x04name\x18\x03 \x01(\t\"a\n\x13LabelScopingRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\r\n\x05label\x18\x02 \x01(\t\"P\n\x14LabelScopingResponse\x12\x38\n\rlabel_scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\"\x87\x01\n\x0eSupportRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12\r\n\x05label\x18\x03 \x01(\t\"\xfa\x01\n\x14UpdateSupportRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\r\n\x05label\x18\x02 \x01(\t\x12P\n\x11time_freq_support\x18\x03 \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12\x34\n\x07support\x18\x04 \x01(\x0b\x32!.ansys.api.dpf.support.v0.SupportH\x00\x42\x0e\n\x0csupport_type\"P\n\x11GetAllDataRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\"b\n\x14UpdateAllDataRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\r\n\x05\x61rray\x18\x02 \x01(\x0c\"\xf6\x01\n\x17UpdateCollectionRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x65\n\x11string_properties\x18\x02 \x03(\x0b\x32J.ansys.api.dpf.collection.v0.UpdateCollectionRequest.StringPropertiesEntry\x1a\x37\n\x15StringPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"o\n\x11UpdateSizeRequest\x12;\n\ncollection\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x0c\n\x04size\x18\x02 
\x01(\x05\x12\x0f\n\x07reserve\x18\x03 \x01(\x08\"\xfe\x03\n\x17SingleCollectionAllData\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12\\\n\rids_per_label\x18\x02 \x03(\x0b\x32\x45.ansys.api.dpf.collection.v0.SingleCollectionAllData.IdsPerLabelEntry\x12\x64\n\x11support_per_label\x18\x03 \x03(\x0b\x32I.ansys.api.dpf.collection.v0.SingleCollectionAllData.SupportPerLabelEntry\x12\x35\n\x04\x64\x61ta\x18\x04 \x03(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\x1a[\n\x10IdsPerLabelEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection:\x02\x38\x01\x1a`\n\x14SupportPerLabelEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny:\x02\x38\x01\"j\n\x07\x41llData\x12I\n\x0b\x63ollections\x18\x01 \x03(\x0b\x32\x34.ansys.api.dpf.collection.v0.SingleCollectionAllData\x12\x14\n\x0c\x63oll_indices\x18\x02 \x03(\x04\"_\n\x13IntegralAllDataSize\x12\x18\n\x10\x63ollection_index\x18\x01 \x01(\x04\x12\x1a\n\x12\x63urrent_chunk_size\x18\x02 \x01(\x04\x12\x12\n\ntotal_size\x18\x03 \x01(\x04\"\x8c\x01\n\x0fIntegralAllData\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12\r\n\x05\x61rray\x18\x02 \x01(\x0c\x12?\n\x05sizes\x18\x03 \x03(\x0b\x32\x30.ansys.api.dpf.collection.v0.IntegralAllDataSize\"V\n\x16\x43reateWithDataResponse\x12<\n\x0b\x63ollections\x18\x01 
\x03(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection2\xc1\r\n\x11\x43ollectionService\x12\x61\n\x06\x43reate\x12..ansys.api.dpf.collection.v0.CollectionRequest\x1a\'.ansys.api.dpf.collection.v0.Collection\x12o\n\x0e\x43reateWithData\x12$.ansys.api.dpf.collection.v0.AllData\x1a\x33.ansys.api.dpf.collection.v0.CreateWithDataResponse(\x01\x30\x01\x12\x7f\n\x16\x43reateIntegralWithData\x12,.ansys.api.dpf.collection.v0.IntegralAllData\x1a\x33.ansys.api.dpf.collection.v0.CreateWithDataResponse(\x01\x30\x01\x12\\\n\x06Update\x12\x34.ansys.api.dpf.collection.v0.UpdateCollectionRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12^\n\x0cUpdateLabels\x12\x30.ansys.api.dpf.collection.v0.UpdateLabelsRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12W\n\x0bUpdateEntry\x12*.ansys.api.dpf.collection.v0.UpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12Z\n\nUpdateSize\x12..ansys.api.dpf.collection.v0.UpdateSizeRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12Z\n\x04List\x12\'.ansys.api.dpf.collection.v0.Collection\x1a).ansys.api.dpf.collection.v0.ListResponse\x12h\n\nGetEntries\x12).ansys.api.dpf.collection.v0.EntryRequest\x1a/.ansys.api.dpf.collection.v0.GetEntriesResponse\x12v\n\x11GetEntriesIndices\x12).ansys.api.dpf.collection.v0.EntryRequest\x1a\x36.ansys.api.dpf.collection.v0.GetEntriesIndicesResponse\x12\\\n\nGetSupport\x12+.ansys.api.dpf.collection.v0.SupportRequest\x1a!.ansys.api.dpf.support.v0.Support\x12v\n\x0fGetLabelScoping\x12\x30.ansys.api.dpf.collection.v0.LabelScopingRequest\x1a\x31.ansys.api.dpf.collection.v0.LabelScopingResponse\x12`\n\rUpdateSupport\x12\x31.ansys.api.dpf.collection.v0.UpdateSupportRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12\\\n\nGetAllData\x12..ansys.api.dpf.collection.v0.GetAllDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Array0\x01\x12\x62\n\rUpdateAllData\x12\x31.ansys.api.dpf.collection.v0.UpdateAllDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12[\n\x08\x44\x65scribe\x12&.ansys.api.dpf.base.v0.DescribeRequest\x1a\'.ansys.api.dpf.base.v0.De
scribeResponse\x12O\n\x06\x44\x65lete\x12\'.ansys.api.dpf.collection.v0.Collection\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x1e\xaa\x02\x1b\x41nsys.Api.Dpf.Collection.v0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -30,48 +32,64 @@
_globals['DESCRIPTOR']._serialized_options = b'\252\002\033Ansys.Api.Dpf.Collection.v0'
_globals['_UPDATECOLLECTIONREQUEST_STRINGPROPERTIESENTRY']._options = None
_globals['_UPDATECOLLECTIONREQUEST_STRINGPROPERTIESENTRY']._serialized_options = b'8\001'
- _globals['_COLLECTION']._serialized_start=162
- _globals['_COLLECTION']._serialized_end=270
- _globals['_COLLECTIONREQUEST']._serialized_start=272
- _globals['_COLLECTIONREQUEST']._serialized_end=334
- _globals['_DEFAULTVALUE']._serialized_start=336
- _globals['_DEFAULTVALUE']._serialized_end=373
- _globals['_NEWLABEL']._serialized_start=375
- _globals['_NEWLABEL']._serialized_end=466
- _globals['_UPDATELABELSREQUEST']._serialized_start=469
- _globals['_UPDATELABELSREQUEST']._serialized_end=631
- _globals['_UPDATEREQUEST']._serialized_start=634
- _globals['_UPDATEREQUEST']._serialized_end=897
- _globals['_ENTRYREQUEST']._serialized_start=900
- _globals['_ENTRYREQUEST']._serialized_end=1069
- _globals['_ENTRY']._serialized_start=1072
- _globals['_ENTRY']._serialized_end=1259
- _globals['_GETENTRIESRESPONSE']._serialized_start=1261
- _globals['_GETENTRIESRESPONSE']._serialized_end=1334
- _globals['_GETENTRIESINDICESRESPONSE']._serialized_start=1336
- _globals['_GETENTRIESINDICESRESPONSE']._serialized_end=1414
- _globals['_LABELS']._serialized_start=1416
- _globals['_LABELS']._serialized_end=1440
- _globals['_LISTRESPONSE']._serialized_start=1442
- _globals['_LISTRESPONSE']._serialized_end=1546
- _globals['_LABELSCOPINGREQUEST']._serialized_start=1548
- _globals['_LABELSCOPINGREQUEST']._serialized_end=1645
- _globals['_LABELSCOPINGRESPONSE']._serialized_start=1647
- _globals['_LABELSCOPINGRESPONSE']._serialized_end=1727
- _globals['_SUPPORTREQUEST']._serialized_start=1730
- _globals['_SUPPORTREQUEST']._serialized_end=1865
- _globals['_UPDATESUPPORTREQUEST']._serialized_start=1868
- _globals['_UPDATESUPPORTREQUEST']._serialized_end=2118
- _globals['_GETALLDATAREQUEST']._serialized_start=2120
- _globals['_GETALLDATAREQUEST']._serialized_end=2200
- _globals['_UPDATEALLDATAREQUEST']._serialized_start=2202
- _globals['_UPDATEALLDATAREQUEST']._serialized_end=2300
- _globals['_UPDATECOLLECTIONREQUEST']._serialized_start=2303
- _globals['_UPDATECOLLECTIONREQUEST']._serialized_end=2549
- _globals['_UPDATECOLLECTIONREQUEST_STRINGPROPERTIESENTRY']._serialized_start=2494
- _globals['_UPDATECOLLECTIONREQUEST_STRINGPROPERTIESENTRY']._serialized_end=2549
- _globals['_UPDATESIZEREQUEST']._serialized_start=2551
- _globals['_UPDATESIZEREQUEST']._serialized_end=2662
- _globals['_COLLECTIONSERVICE']._serialized_start=2665
- _globals['_COLLECTIONSERVICE']._serialized_end=4152
+ _globals['_SINGLECOLLECTIONALLDATA_IDSPERLABELENTRY']._options = None
+ _globals['_SINGLECOLLECTIONALLDATA_IDSPERLABELENTRY']._serialized_options = b'8\001'
+ _globals['_SINGLECOLLECTIONALLDATA_SUPPORTPERLABELENTRY']._options = None
+ _globals['_SINGLECOLLECTIONALLDATA_SUPPORTPERLABELENTRY']._serialized_options = b'8\001'
+ _globals['_COLLECTIONREQUEST']._serialized_start=211
+ _globals['_COLLECTIONREQUEST']._serialized_end=273
+ _globals['_DEFAULTVALUE']._serialized_start=275
+ _globals['_DEFAULTVALUE']._serialized_end=312
+ _globals['_NEWLABEL']._serialized_start=314
+ _globals['_NEWLABEL']._serialized_end=405
+ _globals['_UPDATELABELSREQUEST']._serialized_start=408
+ _globals['_UPDATELABELSREQUEST']._serialized_end=570
+ _globals['_UPDATEREQUEST']._serialized_start=573
+ _globals['_UPDATEREQUEST']._serialized_end=836
+ _globals['_ENTRYREQUEST']._serialized_start=839
+ _globals['_ENTRYREQUEST']._serialized_end=1008
+ _globals['_ENTRY']._serialized_start=1011
+ _globals['_ENTRY']._serialized_end=1198
+ _globals['_GETENTRIESRESPONSE']._serialized_start=1200
+ _globals['_GETENTRIESRESPONSE']._serialized_end=1273
+ _globals['_GETENTRIESINDICESRESPONSE']._serialized_start=1275
+ _globals['_GETENTRIESINDICESRESPONSE']._serialized_end=1353
+ _globals['_LABELS']._serialized_start=1355
+ _globals['_LABELS']._serialized_end=1379
+ _globals['_LISTRESPONSE']._serialized_start=1381
+ _globals['_LISTRESPONSE']._serialized_end=1485
+ _globals['_LABELSCOPINGREQUEST']._serialized_start=1487
+ _globals['_LABELSCOPINGREQUEST']._serialized_end=1584
+ _globals['_LABELSCOPINGRESPONSE']._serialized_start=1586
+ _globals['_LABELSCOPINGRESPONSE']._serialized_end=1666
+ _globals['_SUPPORTREQUEST']._serialized_start=1669
+ _globals['_SUPPORTREQUEST']._serialized_end=1804
+ _globals['_UPDATESUPPORTREQUEST']._serialized_start=1807
+ _globals['_UPDATESUPPORTREQUEST']._serialized_end=2057
+ _globals['_GETALLDATAREQUEST']._serialized_start=2059
+ _globals['_GETALLDATAREQUEST']._serialized_end=2139
+ _globals['_UPDATEALLDATAREQUEST']._serialized_start=2141
+ _globals['_UPDATEALLDATAREQUEST']._serialized_end=2239
+ _globals['_UPDATECOLLECTIONREQUEST']._serialized_start=2242
+ _globals['_UPDATECOLLECTIONREQUEST']._serialized_end=2488
+ _globals['_UPDATECOLLECTIONREQUEST_STRINGPROPERTIESENTRY']._serialized_start=2433
+ _globals['_UPDATECOLLECTIONREQUEST_STRINGPROPERTIESENTRY']._serialized_end=2488
+ _globals['_UPDATESIZEREQUEST']._serialized_start=2490
+ _globals['_UPDATESIZEREQUEST']._serialized_end=2601
+ _globals['_SINGLECOLLECTIONALLDATA']._serialized_start=2604
+ _globals['_SINGLECOLLECTIONALLDATA']._serialized_end=3114
+ _globals['_SINGLECOLLECTIONALLDATA_IDSPERLABELENTRY']._serialized_start=2925
+ _globals['_SINGLECOLLECTIONALLDATA_IDSPERLABELENTRY']._serialized_end=3016
+ _globals['_SINGLECOLLECTIONALLDATA_SUPPORTPERLABELENTRY']._serialized_start=3018
+ _globals['_SINGLECOLLECTIONALLDATA_SUPPORTPERLABELENTRY']._serialized_end=3114
+ _globals['_ALLDATA']._serialized_start=3116
+ _globals['_ALLDATA']._serialized_end=3222
+ _globals['_INTEGRALALLDATASIZE']._serialized_start=3224
+ _globals['_INTEGRALALLDATASIZE']._serialized_end=3319
+ _globals['_INTEGRALALLDATA']._serialized_start=3322
+ _globals['_INTEGRALALLDATA']._serialized_end=3462
+ _globals['_CREATEWITHDATARESPONSE']._serialized_start=3464
+ _globals['_CREATEWITHDATARESPONSE']._serialized_end=3550
+ _globals['_COLLECTIONSERVICE']._serialized_start=3553
+ _globals['_COLLECTIONSERVICE']._serialized_end=5282
# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/collection_pb2_grpc.py b/src/ansys/grpc/dpf/collection_pb2_grpc.py
index 1d7a8d6485..4b5adec311 100644
--- a/src/ansys/grpc/dpf/collection_pb2_grpc.py
+++ b/src/ansys/grpc/dpf/collection_pb2_grpc.py
@@ -3,6 +3,7 @@
import grpc
import ansys.grpc.dpf.base_pb2 as base__pb2
+import ansys.grpc.dpf.collection_message_pb2 as collection__message__pb2
import ansys.grpc.dpf.collection_pb2 as collection__pb2
import ansys.grpc.dpf.support_pb2 as support__pb2
@@ -19,7 +20,17 @@ def __init__(self, channel):
self.Create = channel.unary_unary(
'/ansys.api.dpf.collection.v0.CollectionService/Create',
request_serializer=collection__pb2.CollectionRequest.SerializeToString,
- response_deserializer=collection__pb2.Collection.FromString,
+ response_deserializer=collection__message__pb2.Collection.FromString,
+ )
+ self.CreateWithData = channel.stream_stream(
+ '/ansys.api.dpf.collection.v0.CollectionService/CreateWithData',
+ request_serializer=collection__pb2.AllData.SerializeToString,
+ response_deserializer=collection__pb2.CreateWithDataResponse.FromString,
+ )
+ self.CreateIntegralWithData = channel.stream_stream(
+ '/ansys.api.dpf.collection.v0.CollectionService/CreateIntegralWithData',
+ request_serializer=collection__pb2.IntegralAllData.SerializeToString,
+ response_deserializer=collection__pb2.CreateWithDataResponse.FromString,
)
self.Update = channel.unary_unary(
'/ansys.api.dpf.collection.v0.CollectionService/Update',
@@ -43,7 +54,7 @@ def __init__(self, channel):
)
self.List = channel.unary_unary(
'/ansys.api.dpf.collection.v0.CollectionService/List',
- request_serializer=collection__pb2.Collection.SerializeToString,
+ request_serializer=collection__message__pb2.Collection.SerializeToString,
response_deserializer=collection__pb2.ListResponse.FromString,
)
self.GetEntries = channel.unary_unary(
@@ -88,7 +99,7 @@ def __init__(self, channel):
)
self.Delete = channel.unary_unary(
'/ansys.api.dpf.collection.v0.CollectionService/Delete',
- request_serializer=collection__pb2.Collection.SerializeToString,
+ request_serializer=collection__message__pb2.Collection.SerializeToString,
response_deserializer=base__pb2.Empty.FromString,
)
@@ -102,6 +113,18 @@ def Create(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
+ def CreateWithData(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def CreateIntegralWithData(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
def Update(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -199,7 +222,17 @@ def add_CollectionServiceServicer_to_server(servicer, server):
'Create': grpc.unary_unary_rpc_method_handler(
servicer.Create,
request_deserializer=collection__pb2.CollectionRequest.FromString,
- response_serializer=collection__pb2.Collection.SerializeToString,
+ response_serializer=collection__message__pb2.Collection.SerializeToString,
+ ),
+ 'CreateWithData': grpc.stream_stream_rpc_method_handler(
+ servicer.CreateWithData,
+ request_deserializer=collection__pb2.AllData.FromString,
+ response_serializer=collection__pb2.CreateWithDataResponse.SerializeToString,
+ ),
+ 'CreateIntegralWithData': grpc.stream_stream_rpc_method_handler(
+ servicer.CreateIntegralWithData,
+ request_deserializer=collection__pb2.IntegralAllData.FromString,
+ response_serializer=collection__pb2.CreateWithDataResponse.SerializeToString,
),
'Update': grpc.unary_unary_rpc_method_handler(
servicer.Update,
@@ -223,7 +256,7 @@ def add_CollectionServiceServicer_to_server(servicer, server):
),
'List': grpc.unary_unary_rpc_method_handler(
servicer.List,
- request_deserializer=collection__pb2.Collection.FromString,
+ request_deserializer=collection__message__pb2.Collection.FromString,
response_serializer=collection__pb2.ListResponse.SerializeToString,
),
'GetEntries': grpc.unary_unary_rpc_method_handler(
@@ -268,7 +301,7 @@ def add_CollectionServiceServicer_to_server(servicer, server):
),
'Delete': grpc.unary_unary_rpc_method_handler(
servicer.Delete,
- request_deserializer=collection__pb2.Collection.FromString,
+ request_deserializer=collection__message__pb2.Collection.FromString,
response_serializer=base__pb2.Empty.SerializeToString,
),
}
@@ -294,7 +327,41 @@ def Create(request,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ansys.api.dpf.collection.v0.CollectionService/Create',
collection__pb2.CollectionRequest.SerializeToString,
- collection__pb2.Collection.FromString,
+ collection__message__pb2.Collection.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def CreateWithData(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/ansys.api.dpf.collection.v0.CollectionService/CreateWithData',
+ collection__pb2.AllData.SerializeToString,
+ collection__pb2.CreateWithDataResponse.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def CreateIntegralWithData(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/ansys.api.dpf.collection.v0.CollectionService/CreateIntegralWithData',
+ collection__pb2.IntegralAllData.SerializeToString,
+ collection__pb2.CreateWithDataResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@@ -378,7 +445,7 @@ def List(request,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ansys.api.dpf.collection.v0.CollectionService/List',
- collection__pb2.Collection.SerializeToString,
+ collection__message__pb2.Collection.SerializeToString,
collection__pb2.ListResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@@ -531,7 +598,7 @@ def Delete(request,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/ansys.api.dpf.collection.v0.CollectionService/Delete',
- collection__pb2.Collection.SerializeToString,
+ collection__message__pb2.Collection.SerializeToString,
base__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/src/ansys/grpc/dpf/data_tree_pb2.py b/src/ansys/grpc/dpf/data_tree_pb2.py
index 4d708d0634..5fc5a3ac0a 100644
--- a/src/ansys/grpc/dpf/data_tree_pb2.py
+++ b/src/ansys/grpc/dpf/data_tree_pb2.py
@@ -15,7 +15,7 @@
import ansys.grpc.dpf.base_pb2 as base__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x64\x61ta_tree.proto\x12\x1a\x61nsys.api.dpf.data_tree.v0\x1a\nbase.proto\"?\n\x08\x44\x61taTree\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"x\n\rUpdateRequest\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\x12.\n\x04\x64\x61ta\x18\x02 \x03(\x0b\x32 .ansys.api.dpf.data_tree.v0.Data\"\x82\x01\n\nGetRequest\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\x12;\n\x04\x64\x61ta\x18\x02 \x03(\x0b\x32-.ansys.api.dpf.data_tree.v0.SingleDataRequest\"=\n\x0bGetResponse\x12.\n\x04\x64\x61ta\x18\x02 \x03(\x0b\x32 .ansys.api.dpf.data_tree.v0.Data\"T\n\nHasRequest\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\x12\r\n\x05names\x18\x02 \x03(\t\"\x92\x01\n\x0bHasResponse\x12O\n\rhas_each_name\x18\x01 \x03(\x0b\x32\x38.ansys.api.dpf.data_tree.v0.HasResponse.HasEachNameEntry\x1a\x32\n\x10HasEachNameEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\"\xba\x02\n\x04\x44\x61ta\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x03int\x18\x02 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x03 \x01(\x01H\x00\x12\x10\n\x06string\x18\x04 \x01(\tH\x00\x12\x33\n\x07vec_int\x18\x05 \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVectorH\x00\x12\x39\n\nvec_double\x18\x06 \x01(\x0b\x32#.ansys.api.dpf.base.v0.DoubleVectorH\x00\x12\x39\n\nvec_string\x18\x07 \x01(\x0b\x32#.ansys.api.dpf.base.v0.StringVectorH\x00\x12\x39\n\tdata_tree\x18\x08 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x42\x0b\n\tdata_type\"L\n\x11SingleDataRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"F\n\x0bListRequest\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\"?\n\x0cListResponse\x12\x17\n\x0f\x61ttribute_names\x18\x01 \x03(\t\x12\x16\n\x0esub_tree_names\x18\x02 
\x03(\t2\x8b\x04\n\x0f\x44\x61taTreeService\x12L\n\x06\x43reate\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a$.ansys.api.dpf.data_tree.v0.DataTree\x12Q\n\x06Update\x12).ansys.api.dpf.data_tree.v0.UpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12Y\n\x04List\x12\'.ansys.api.dpf.data_tree.v0.ListRequest\x1a(.ansys.api.dpf.data_tree.v0.ListResponse\x12V\n\x03Get\x12&.ansys.api.dpf.data_tree.v0.GetRequest\x1a\'.ansys.api.dpf.data_tree.v0.GetResponse\x12V\n\x03Has\x12&.ansys.api.dpf.data_tree.v0.HasRequest\x1a\'.ansys.api.dpf.data_tree.v0.HasResponse\x12L\n\x06\x44\x65lete\x12$.ansys.api.dpf.data_tree.v0.DataTree\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x1c\xaa\x02\x19\x41nsys.Api.Dpf.DataTree.V0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x64\x61ta_tree.proto\x12\x1a\x61nsys.api.dpf.data_tree.v0\x1a\nbase.proto\"?\n\x08\x44\x61taTree\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"x\n\rUpdateRequest\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\x12.\n\x04\x64\x61ta\x18\x02 \x03(\x0b\x32 .ansys.api.dpf.data_tree.v0.Data\"\x82\x01\n\nGetRequest\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\x12;\n\x04\x64\x61ta\x18\x02 \x03(\x0b\x32-.ansys.api.dpf.data_tree.v0.SingleDataRequest\"=\n\x0bGetResponse\x12.\n\x04\x64\x61ta\x18\x02 \x03(\x0b\x32 .ansys.api.dpf.data_tree.v0.Data\"T\n\nHasRequest\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\x12\r\n\x05names\x18\x02 \x03(\t\"\x92\x01\n\x0bHasResponse\x12O\n\rhas_each_name\x18\x01 \x03(\x0b\x32\x38.ansys.api.dpf.data_tree.v0.HasResponse.HasEachNameEntry\x1a\x32\n\x10HasEachNameEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\"\xba\x02\n\x04\x44\x61ta\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x03int\x18\x02 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x03 \x01(\x01H\x00\x12\x10\n\x06string\x18\x04 \x01(\tH\x00\x12\x33\n\x07vec_int\x18\x05 \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVectorH\x00\x12\x39\n\nvec_double\x18\x06 \x01(\x0b\x32#.ansys.api.dpf.base.v0.DoubleVectorH\x00\x12\x39\n\nvec_string\x18\x07 \x01(\x0b\x32#.ansys.api.dpf.base.v0.StringVectorH\x00\x12\x39\n\tdata_tree\x18\x08 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x42\x0b\n\tdata_type\"L\n\x11SingleDataRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"F\n\x0bListRequest\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\"?\n\x0cListResponse\x12\x17\n\x0f\x61ttribute_names\x18\x01 \x03(\t\x12\x16\n\x0esub_tree_names\x18\x02 
\x03(\t\"\x97\x02\n\x0c\x44\x61taTreeNode\x12L\n\nattributes\x18\x01 \x03(\x0b\x32\x38.ansys.api.dpf.data_tree.v0.DataTreeNode.AttributesEntry\x12Q\n\rsubtree_index\x18\x02 \x03(\x0b\x32:.ansys.api.dpf.data_tree.v0.DataTreeNode.SubtreeIndexEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11SubtreeIndexEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\"W\n\x07\x41llData\x12\x38\n\x06values\x18\x01 \x03(\x0b\x32(.ansys.api.dpf.data_tree.v0.DataTreeNode\x12\x12\n\ntree_index\x18\x02 \x03(\x04\"R\n\x16\x43reateWithDataResponse\x12\x38\n\ndata_trees\x18\x01 \x03(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree2\xfa\x04\n\x0f\x44\x61taTreeService\x12L\n\x06\x43reate\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a$.ansys.api.dpf.data_tree.v0.DataTree\x12m\n\x0e\x43reateWithData\x12#.ansys.api.dpf.data_tree.v0.AllData\x1a\x32.ansys.api.dpf.data_tree.v0.CreateWithDataResponse(\x01\x30\x01\x12Q\n\x06Update\x12).ansys.api.dpf.data_tree.v0.UpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12Y\n\x04List\x12\'.ansys.api.dpf.data_tree.v0.ListRequest\x1a(.ansys.api.dpf.data_tree.v0.ListResponse\x12V\n\x03Get\x12&.ansys.api.dpf.data_tree.v0.GetRequest\x1a\'.ansys.api.dpf.data_tree.v0.GetResponse\x12V\n\x03Has\x12&.ansys.api.dpf.data_tree.v0.HasRequest\x1a\'.ansys.api.dpf.data_tree.v0.HasResponse\x12L\n\x06\x44\x65lete\x12$.ansys.api.dpf.data_tree.v0.DataTree\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x1c\xaa\x02\x19\x41nsys.Api.Dpf.DataTree.V0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -25,6 +25,10 @@
_globals['DESCRIPTOR']._serialized_options = b'\252\002\031Ansys.Api.Dpf.DataTree.V0'
_globals['_HASRESPONSE_HASEACHNAMEENTRY']._options = None
_globals['_HASRESPONSE_HASEACHNAMEENTRY']._serialized_options = b'8\001'
+ _globals['_DATATREENODE_ATTRIBUTESENTRY']._options = None
+ _globals['_DATATREENODE_ATTRIBUTESENTRY']._serialized_options = b'8\001'
+ _globals['_DATATREENODE_SUBTREEINDEXENTRY']._options = None
+ _globals['_DATATREENODE_SUBTREEINDEXENTRY']._serialized_options = b'8\001'
_globals['_DATATREE']._serialized_start=59
_globals['_DATATREE']._serialized_end=122
_globals['_UPDATEREQUEST']._serialized_start=124
@@ -47,6 +51,16 @@
_globals['_LISTREQUEST']._serialized_end=1142
_globals['_LISTRESPONSE']._serialized_start=1144
_globals['_LISTRESPONSE']._serialized_end=1207
- _globals['_DATATREESERVICE']._serialized_start=1210
- _globals['_DATATREESERVICE']._serialized_end=1733
+ _globals['_DATATREENODE']._serialized_start=1210
+ _globals['_DATATREENODE']._serialized_end=1489
+ _globals['_DATATREENODE_ATTRIBUTESENTRY']._serialized_start=1387
+ _globals['_DATATREENODE_ATTRIBUTESENTRY']._serialized_end=1436
+ _globals['_DATATREENODE_SUBTREEINDEXENTRY']._serialized_start=1438
+ _globals['_DATATREENODE_SUBTREEINDEXENTRY']._serialized_end=1489
+ _globals['_ALLDATA']._serialized_start=1491
+ _globals['_ALLDATA']._serialized_end=1578
+ _globals['_CREATEWITHDATARESPONSE']._serialized_start=1580
+ _globals['_CREATEWITHDATARESPONSE']._serialized_end=1662
+ _globals['_DATATREESERVICE']._serialized_start=1665
+ _globals['_DATATREESERVICE']._serialized_end=2299
# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/data_tree_pb2_grpc.py b/src/ansys/grpc/dpf/data_tree_pb2_grpc.py
index 21fd175283..0b83c4382e 100644
--- a/src/ansys/grpc/dpf/data_tree_pb2_grpc.py
+++ b/src/ansys/grpc/dpf/data_tree_pb2_grpc.py
@@ -20,6 +20,11 @@ def __init__(self, channel):
request_serializer=base__pb2.Empty.SerializeToString,
response_deserializer=data__tree__pb2.DataTree.FromString,
)
+ self.CreateWithData = channel.stream_stream(
+ '/ansys.api.dpf.data_tree.v0.DataTreeService/CreateWithData',
+ request_serializer=data__tree__pb2.AllData.SerializeToString,
+ response_deserializer=data__tree__pb2.CreateWithDataResponse.FromString,
+ )
self.Update = channel.unary_unary(
'/ansys.api.dpf.data_tree.v0.DataTreeService/Update',
request_serializer=data__tree__pb2.UpdateRequest.SerializeToString,
@@ -56,6 +61,12 @@ def Create(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
+ def CreateWithData(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
def Update(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -94,6 +105,11 @@ def add_DataTreeServiceServicer_to_server(servicer, server):
request_deserializer=base__pb2.Empty.FromString,
response_serializer=data__tree__pb2.DataTree.SerializeToString,
),
+ 'CreateWithData': grpc.stream_stream_rpc_method_handler(
+ servicer.CreateWithData,
+ request_deserializer=data__tree__pb2.AllData.FromString,
+ response_serializer=data__tree__pb2.CreateWithDataResponse.SerializeToString,
+ ),
'Update': grpc.unary_unary_rpc_method_handler(
servicer.Update,
request_deserializer=data__tree__pb2.UpdateRequest.FromString,
@@ -146,6 +162,23 @@ def Create(request,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+ @staticmethod
+ def CreateWithData(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/ansys.api.dpf.data_tree.v0.DataTreeService/CreateWithData',
+ data__tree__pb2.AllData.SerializeToString,
+ data__tree__pb2.CreateWithDataResponse.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
@staticmethod
def Update(request,
target,
diff --git a/src/ansys/grpc/dpf/dpf_any_pb2.py b/src/ansys/grpc/dpf/dpf_any_pb2.py
index bbddc5ddd1..28aaa761fe 100644
--- a/src/ansys/grpc/dpf/dpf_any_pb2.py
+++ b/src/ansys/grpc/dpf/dpf_any_pb2.py
@@ -14,7 +14,7 @@
import ansys.grpc.dpf.base_pb2 as base__pb2
import ansys.grpc.dpf.dpf_any_message_pb2 as dpf__any__message__pb2
-import ansys.grpc.dpf.collection_pb2 as collection__pb2
+import ansys.grpc.dpf.collection_message_pb2 as collection__message__pb2
import ansys.grpc.dpf.field_pb2 as field__pb2
import ansys.grpc.dpf.scoping_pb2 as scoping__pb2
import ansys.grpc.dpf.data_sources_pb2 as data__sources__pb2
@@ -28,7 +28,7 @@
import ansys.grpc.dpf.data_tree_pb2 as data__tree__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rdpf_any.proto\x12\x18\x61nsys.api.dpf.dpf_any.v0\x1a\nbase.proto\x1a\x15\x64pf_any_message.proto\x1a\x10\x63ollection.proto\x1a\x0b\x66ield.proto\x1a\rscoping.proto\x1a\x12\x64\x61ta_sources.proto\x1a\x13meshed_region.proto\x1a\x17time_freq_support.proto\x1a\x14\x63yclic_support.proto\x1a\x16workflow_message.proto\x1a\x11result_info.proto\x1a\x0eoperator.proto\x1a\x1cgeneric_data_container.proto\x1a\x0f\x64\x61ta_tree.proto\"$\n\x0cListResponse\x12\x14\n\x0cwrapped_type\x18\x01 \x01(\t\"\\\n\x0bTypeRequest\x12\x35\n\x03\x61ny\x18\x01 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny\x12\x16\n\x0erequested_type\x18\x02 \x01(\t\"\"\n\x0cTypeResponse\x12\x12\n\nis_type_of\x18\x01 \x01(\x08\"\x9e\x01\n\x0cGetAsRequest\x12\x35\n\x03\x61ny\x18\x01 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12,\n\x07subtype\x18\x03 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"\xf1\x06\n\rGetAsResponse\x12.\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x02 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\x03 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\x04 \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12<\n\x04mesh\x18\x05 \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12\x45\n\x0b\x63yc_support\x18\x06 \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12P\n\x11time_freq_support\x18\x07 \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12?\n\x08workflow\x18\x08 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12;\n\x08operator\x18\t \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12?\n\x0bresult_info\x18\n \x01(\x0b\x32(.ansys.api.dpf.result_info.v0.ResultInfoH\x00\x12_\n\x16generic_data_container\x18\x0b 
\x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x12\x39\n\tdata_tree\x18\x0f \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12\x11\n\x07int_val\x18\x0c \x01(\x05H\x00\x12\x14\n\nstring_val\x18\r \x01(\tH\x00\x12\x14\n\ndouble_val\x18\x0e \x01(\x01H\x00\x42\x06\n\x04\x64\x61ta\"\xb8\x01\n\rCreateRequest\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12\x35\n\x02id\x18\x02 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifierH\x00\x12\x11\n\x07int_val\x18\x03 \x01(\x05H\x00\x12\x14\n\nstring_val\x18\x04 \x01(\tH\x00\x12\x14\n\ndouble_val\x18\x05 \x01(\x01H\x00\x42\x06\n\x04\x64\x61ta\"A\n\x12GetAsArrayResponse\x12+\n\x05\x61rray\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"o\n\x15\x43reateStreamedRequest\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12+\n\x05\x61rray\x18\x02 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array2\xd1\x04\n\rDpfAnyService\x12[\n\x06\x43reate\x12\'.ansys.api.dpf.dpf_any.v0.CreateRequest\x1a(.ansys.api.dpf.dpf_any_message.v0.DpfAny\x12X\n\x04List\x12(.ansys.api.dpf.dpf_any_message.v0.DpfAny\x1a&.ansys.api.dpf.dpf_any.v0.ListResponse\x12W\n\x06IsType\x12%.ansys.api.dpf.dpf_any.v0.TypeRequest\x1a&.ansys.api.dpf.dpf_any.v0.TypeResponse\x12X\n\x05GetAs\x12&.ansys.api.dpf.dpf_any.v0.GetAsRequest\x1a\'.ansys.api.dpf.dpf_any.v0.GetAsResponse\x12m\n\x0e\x43reateStreamed\x12/.ansys.api.dpf.dpf_any.v0.CreateStreamedRequest\x1a(.ansys.api.dpf.dpf_any_message.v0.DpfAny(\x01\x12g\n\rGetAsStreamed\x12&.ansys.api.dpf.dpf_any.v0.GetAsRequest\x1a,.ansys.api.dpf.dpf_any.v0.GetAsArrayResponse0\x01\x42\x1a\xaa\x02\x17\x41nsys.Api.Dpf.DpfAny.V0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rdpf_any.proto\x12\x18\x61nsys.api.dpf.dpf_any.v0\x1a\nbase.proto\x1a\x15\x64pf_any_message.proto\x1a\x18\x63ollection_message.proto\x1a\x0b\x66ield.proto\x1a\rscoping.proto\x1a\x12\x64\x61ta_sources.proto\x1a\x13meshed_region.proto\x1a\x17time_freq_support.proto\x1a\x14\x63yclic_support.proto\x1a\x16workflow_message.proto\x1a\x11result_info.proto\x1a\x0eoperator.proto\x1a\x1cgeneric_data_container.proto\x1a\x0f\x64\x61ta_tree.proto\"$\n\x0cListResponse\x12\x14\n\x0cwrapped_type\x18\x01 \x01(\t\"\\\n\x0bTypeRequest\x12\x35\n\x03\x61ny\x18\x01 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny\x12\x16\n\x0erequested_type\x18\x02 \x01(\t\"\"\n\x0cTypeResponse\x12\x12\n\nis_type_of\x18\x01 \x01(\x08\"\x9e\x01\n\x0cGetAsRequest\x12\x35\n\x03\x61ny\x18\x01 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12,\n\x07subtype\x18\x03 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"\xf1\x06\n\rGetAsResponse\x12.\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x02 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\x03 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\x04 \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12<\n\x04mesh\x18\x05 \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12\x45\n\x0b\x63yc_support\x18\x06 \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12P\n\x11time_freq_support\x18\x07 \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12?\n\x08workflow\x18\x08 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12;\n\x08operator\x18\t \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12?\n\x0bresult_info\x18\n 
\x01(\x0b\x32(.ansys.api.dpf.result_info.v0.ResultInfoH\x00\x12_\n\x16generic_data_container\x18\x0b \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x12\x39\n\tdata_tree\x18\x0f \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12\x11\n\x07int_val\x18\x0c \x01(\x05H\x00\x12\x14\n\nstring_val\x18\r \x01(\tH\x00\x12\x14\n\ndouble_val\x18\x0e \x01(\x01H\x00\x42\x06\n\x04\x64\x61ta\"\xb8\x01\n\rCreateRequest\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12\x35\n\x02id\x18\x02 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifierH\x00\x12\x11\n\x07int_val\x18\x03 \x01(\x05H\x00\x12\x14\n\nstring_val\x18\x04 \x01(\tH\x00\x12\x14\n\ndouble_val\x18\x05 \x01(\x01H\x00\x42\x06\n\x04\x64\x61ta\"A\n\x12GetAsArrayResponse\x12+\n\x05\x61rray\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"o\n\x15\x43reateStreamedRequest\x12)\n\x04type\x18\x01 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12+\n\x05\x61rray\x18\x02 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"6\n\x07\x41llData\x12+\n\x05\x61rray\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"P\n\x16\x43reateWithDataResponse\x12\x36\n\x04\x61nys\x18\x01 
\x03(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny2\xbc\x05\n\rDpfAnyService\x12[\n\x06\x43reate\x12\'.ansys.api.dpf.dpf_any.v0.CreateRequest\x1a(.ansys.api.dpf.dpf_any_message.v0.DpfAny\x12i\n\x0e\x43reateWithData\x12!.ansys.api.dpf.dpf_any.v0.AllData\x1a\x30.ansys.api.dpf.dpf_any.v0.CreateWithDataResponse(\x01\x30\x01\x12X\n\x04List\x12(.ansys.api.dpf.dpf_any_message.v0.DpfAny\x1a&.ansys.api.dpf.dpf_any.v0.ListResponse\x12W\n\x06IsType\x12%.ansys.api.dpf.dpf_any.v0.TypeRequest\x1a&.ansys.api.dpf.dpf_any.v0.TypeResponse\x12X\n\x05GetAs\x12&.ansys.api.dpf.dpf_any.v0.GetAsRequest\x1a\'.ansys.api.dpf.dpf_any.v0.GetAsResponse\x12m\n\x0e\x43reateStreamed\x12/.ansys.api.dpf.dpf_any.v0.CreateStreamedRequest\x1a(.ansys.api.dpf.dpf_any_message.v0.DpfAny(\x01\x12g\n\rGetAsStreamed\x12&.ansys.api.dpf.dpf_any.v0.GetAsRequest\x1a,.ansys.api.dpf.dpf_any.v0.GetAsArrayResponse0\x01\x42\x1a\xaa\x02\x17\x41nsys.Api.Dpf.DpfAny.V0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -36,22 +36,26 @@
if _descriptor._USE_C_DESCRIPTORS == False:
_globals['DESCRIPTOR']._options = None
_globals['DESCRIPTOR']._serialized_options = b'\252\002\027Ansys.Api.Dpf.DpfAny.V0'
- _globals['_LISTRESPONSE']._serialized_start=318
- _globals['_LISTRESPONSE']._serialized_end=354
- _globals['_TYPEREQUEST']._serialized_start=356
- _globals['_TYPEREQUEST']._serialized_end=448
- _globals['_TYPERESPONSE']._serialized_start=450
- _globals['_TYPERESPONSE']._serialized_end=484
- _globals['_GETASREQUEST']._serialized_start=487
- _globals['_GETASREQUEST']._serialized_end=645
- _globals['_GETASRESPONSE']._serialized_start=648
- _globals['_GETASRESPONSE']._serialized_end=1529
- _globals['_CREATEREQUEST']._serialized_start=1532
- _globals['_CREATEREQUEST']._serialized_end=1716
- _globals['_GETASARRAYRESPONSE']._serialized_start=1718
- _globals['_GETASARRAYRESPONSE']._serialized_end=1783
- _globals['_CREATESTREAMEDREQUEST']._serialized_start=1785
- _globals['_CREATESTREAMEDREQUEST']._serialized_end=1896
- _globals['_DPFANYSERVICE']._serialized_start=1899
- _globals['_DPFANYSERVICE']._serialized_end=2492
+ _globals['_LISTRESPONSE']._serialized_start=326
+ _globals['_LISTRESPONSE']._serialized_end=362
+ _globals['_TYPEREQUEST']._serialized_start=364
+ _globals['_TYPEREQUEST']._serialized_end=456
+ _globals['_TYPERESPONSE']._serialized_start=458
+ _globals['_TYPERESPONSE']._serialized_end=492
+ _globals['_GETASREQUEST']._serialized_start=495
+ _globals['_GETASREQUEST']._serialized_end=653
+ _globals['_GETASRESPONSE']._serialized_start=656
+ _globals['_GETASRESPONSE']._serialized_end=1537
+ _globals['_CREATEREQUEST']._serialized_start=1540
+ _globals['_CREATEREQUEST']._serialized_end=1724
+ _globals['_GETASARRAYRESPONSE']._serialized_start=1726
+ _globals['_GETASARRAYRESPONSE']._serialized_end=1791
+ _globals['_CREATESTREAMEDREQUEST']._serialized_start=1793
+ _globals['_CREATESTREAMEDREQUEST']._serialized_end=1904
+ _globals['_ALLDATA']._serialized_start=1906
+ _globals['_ALLDATA']._serialized_end=1960
+ _globals['_CREATEWITHDATARESPONSE']._serialized_start=1962
+ _globals['_CREATEWITHDATARESPONSE']._serialized_end=2042
+ _globals['_DPFANYSERVICE']._serialized_start=2045
+ _globals['_DPFANYSERVICE']._serialized_end=2745
# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/dpf_any_pb2_grpc.py b/src/ansys/grpc/dpf/dpf_any_pb2_grpc.py
index d2eff048b0..5825a4e9ff 100644
--- a/src/ansys/grpc/dpf/dpf_any_pb2_grpc.py
+++ b/src/ansys/grpc/dpf/dpf_any_pb2_grpc.py
@@ -20,6 +20,11 @@ def __init__(self, channel):
request_serializer=dpf__any__pb2.CreateRequest.SerializeToString,
response_deserializer=dpf__any__message__pb2.DpfAny.FromString,
)
+ self.CreateWithData = channel.stream_stream(
+ '/ansys.api.dpf.dpf_any.v0.DpfAnyService/CreateWithData',
+ request_serializer=dpf__any__pb2.AllData.SerializeToString,
+ response_deserializer=dpf__any__pb2.CreateWithDataResponse.FromString,
+ )
self.List = channel.unary_unary(
'/ansys.api.dpf.dpf_any.v0.DpfAnyService/List',
request_serializer=dpf__any__message__pb2.DpfAny.SerializeToString,
@@ -56,6 +61,12 @@ def Create(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
+ def CreateWithData(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
def List(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -94,6 +105,11 @@ def add_DpfAnyServiceServicer_to_server(servicer, server):
request_deserializer=dpf__any__pb2.CreateRequest.FromString,
response_serializer=dpf__any__message__pb2.DpfAny.SerializeToString,
),
+ 'CreateWithData': grpc.stream_stream_rpc_method_handler(
+ servicer.CreateWithData,
+ request_deserializer=dpf__any__pb2.AllData.FromString,
+ response_serializer=dpf__any__pb2.CreateWithDataResponse.SerializeToString,
+ ),
'List': grpc.unary_unary_rpc_method_handler(
servicer.List,
request_deserializer=dpf__any__message__pb2.DpfAny.FromString,
@@ -146,6 +162,23 @@ def Create(request,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+ @staticmethod
+ def CreateWithData(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/ansys.api.dpf.dpf_any.v0.DpfAnyService/CreateWithData',
+ dpf__any__pb2.AllData.SerializeToString,
+ dpf__any__pb2.CreateWithDataResponse.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
@staticmethod
def List(request,
target,
diff --git a/src/ansys/grpc/dpf/field_pb2.py b/src/ansys/grpc/dpf/field_pb2.py
index 8a6bc275ac..40f1e15f31 100644
--- a/src/ansys/grpc/dpf/field_pb2.py
+++ b/src/ansys/grpc/dpf/field_pb2.py
@@ -17,9 +17,10 @@
import ansys.grpc.dpf.field_definition_pb2 as field__definition__pb2
import ansys.grpc.dpf.support_pb2 as support__pb2
import ansys.grpc.dpf.data_tree_pb2 as data__tree__pb2
+import ansys.grpc.dpf.collection_message_pb2 as collection__message__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x66ield.proto\x12\x16\x61nsys.api.dpf.field.v0\x1a\nbase.proto\x1a\rscoping.proto\x1a\x16\x66ield_definition.proto\x1a\rsupport.proto\x1a\x0f\x64\x61ta_tree.proto\"P\n\x14\x43ustomTypeDefinition\x12\x18\n\x10unitary_datatype\x18\x01 \x01(\t\x12\x1e\n\x16num_bytes_unitary_data\x18\x02 \x01(\x05\"\x95\x01\n\x05\x46ield\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\x12\x10\n\x08\x64\x61tatype\x18\x02 \x01(\t\x12\x45\n\x0f\x63ustom_type_def\x18\x03 \x01(\x0b\x32,.ansys.api.dpf.field.v0.CustomTypeDefinition\"\xc5\x02\n\x0c\x46ieldRequest\x12-\n\x06nature\x18\x01 \x01(\x0e\x32\x1d.ansys.api.dpf.base.v0.Nature\x12\x31\n\x08location\x18\x02 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.Location\x12/\n\x04size\x18\x03 \x01(\x0b\x32!.ansys.api.dpf.field.v0.FieldSize\x12\x10\n\x08\x64\x61tatype\x18\x04 \x01(\t\x12I\n\x0e\x64imensionality\x18\x05 \x01(\x0b\x32\x31.ansys.api.dpf.field_definition.v0.Dimensionality\x12\x45\n\x0f\x63ustom_type_def\x18\x06 \x01(\x0b\x32,.ansys.api.dpf.field.v0.CustomTypeDefinition\"\x8d\x01\n\x0e\x41\x64\x64\x44\x61taRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12M\n\x13\x65lemdata_containers\x18\x02 \x01(\x0b\x32\x30.ansys.api.dpf.field.v0.ElementaryDataContainers\"x\n\x14UpdateScopingRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x32\n\x07scoping\x18\x02 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\"\x93\x01\n\x1cUpdateFieldDefinitionRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x45\n\tfield_def\x18\x02 \x01(\x0b\x32\x32.ansys.api.dpf.field_definition.v0.FieldDefinition\"\x9a\x01\n\x1bUpdateElementaryDataRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12M\n\x13\x65lemdata_containers\x18\x02 
\x01(\x0b\x32\x30.ansys.api.dpf.field.v0.ElementaryDataContainers\"\x83\x01\n\x11UpdateSizeRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12/\n\x04size\x18\x02 \x01(\x0b\x32!.ansys.api.dpf.field.v0.FieldSize\x12\x0f\n\x07reserve\x18\x03 \x01(\x08\"Z\n\x1cUpdateDataPointerSizeRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x0c\n\x04size\x18\x02 \x01(\x05\"4\n\tFieldSize\x12\x14\n\x0cscoping_size\x18\x01 \x01(\x05\x12\x11\n\tdata_size\x18\x02 \x01(\x05\"\xae\x02\n\x04\x44\x61ta\x12\x39\n\ndatadouble\x18\x02 \x01(\x0b\x32#.ansys.api.dpf.base.v0.DoubleVectorH\x00\x12\x33\n\x07\x64\x61taint\x18\x03 \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVectorH\x00\x12\x37\n\tdatafloat\x18\x04 \x01(\x0b\x32\".ansys.api.dpf.base.v0.FloatVectorH\x00\x12\x39\n\ndatastring\x18\x01 \x01(\x0b\x32#.ansys.api.dpf.base.v0.StringVectorH\x00\x12\x35\n\x08\x64\x61tabyte\x18\x05 \x01(\x0b\x32!.ansys.api.dpf.base.v0.ByteVectorH\x00\x42\x0b\n\tdatatypes\"q\n\x18\x45lementaryDataContainers\x12\x12\n\nscoping_id\x18\x01 \x01(\x05\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.ansys.api.dpf.field.v0.Data\x12\x15\n\rscoping_index\x18\x03 \x01(\x05\";\n\x0bListRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\":\n\nGetRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\"s\n\x18GetElementaryDataRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x0f\n\x05index\x18\x02 \x01(\x05H\x00\x12\x0c\n\x02id\x18\x03 \x01(\x05H\x00\x42\n\n\x08index_id\"\x1d\n\x0cListResponse\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\"j\n\x19GetElementaryDataResponse\x12M\n\x13\x65lemdata_containers\x18\x01 \x01(\x0b\x32\x30.ansys.api.dpf.field.v0.ElementaryDataContainers\"H\n\x12GetScopingResponse\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\"x\n\x1aGetFieldDefinitionResponse\x12L\n\x10\x66ield_definition\x18\x01 
\x01(\x0b\x32\x32.ansys.api.dpf.field_definition.v0.FieldDefinition\x12\x0c\n\x04name\x18\x02 \x01(\t\"p\n\x0c\x43ountRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x32\n\x06\x65ntity\x18\x02 \x01(\x0e\x32\".ansys.api.dpf.base.v0.CountEntity\"i\n\x0eSupportRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"A\n\x11PropertiesRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\"M\n\x12PropertiesResponse\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\"u\n\x11SetSupportRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x32\n\x07support\x18\x02 \x01(\x0b\x32!.ansys.api.dpf.support.v0.Support\"P\n\x11UpdateDataRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\r\n\x05\x61rray\x18\x02 \x01(\x0c\x32\x8f\x0e\n\x0c\x46ieldService\x12M\n\x06\x43reate\x12$.ansys.api.dpf.field.v0.FieldRequest\x1a\x1d.ansys.api.dpf.field.v0.Field\x12O\n\x07\x41\x64\x64\x44\x61ta\x12&.ansys.api.dpf.field.v0.AddDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12W\n\nUpdateData\x12).ansys.api.dpf.field.v0.UpdateDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12^\n\x11UpdateDataPointer\x12).ansys.api.dpf.field.v0.UpdateDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12[\n\rUpdateScoping\x12,.ansys.api.dpf.field.v0.UpdateScopingRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12U\n\nUpdateSize\x12).ansys.api.dpf.field.v0.UpdateSizeRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12k\n\x15UpdateDataPointerSize\x12\x34.ansys.api.dpf.field.v0.UpdateDataPointerSizeRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12k\n\x15UpdateFieldDefinition\x12\x34.ansys.api.dpf.field.v0.UpdateFieldDefinitionRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12i\n\x14UpdateElementaryData\x12\x33.ansys.api.dpf.field.v0.UpdateElementaryDataRequest\x1a\x1c.ansys.
api.dpf.base.v0.Empty\x12S\n\x04List\x12#.ansys.api.dpf.field.v0.ListRequest\x1a$.ansys.api.dpf.field.v0.ListResponse0\x01\x12^\n\x0fListDataPointer\x12#.ansys.api.dpf.field.v0.ListRequest\x1a$.ansys.api.dpf.field.v0.ListResponse0\x01\x12\\\n\nGetScoping\x12\".ansys.api.dpf.field.v0.GetRequest\x1a*.ansys.api.dpf.field.v0.GetScopingResponse\x12W\n\nGetSupport\x12&.ansys.api.dpf.field.v0.SupportRequest\x1a!.ansys.api.dpf.support.v0.Support\x12\x66\n\rGetProperties\x12).ansys.api.dpf.field.v0.PropertiesRequest\x1a*.ansys.api.dpf.field.v0.PropertiesResponse\x12U\n\nSetSupport\x12).ansys.api.dpf.field.v0.SetSupportRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12l\n\x12GetFieldDefinition\x12\".ansys.api.dpf.field.v0.GetRequest\x1a\x32.ansys.api.dpf.field.v0.GetFieldDefinitionResponse\x12x\n\x11GetElementaryData\x12\x30.ansys.api.dpf.field.v0.GetElementaryDataRequest\x1a\x31.ansys.api.dpf.field.v0.GetElementaryDataResponse\x12S\n\x05\x43ount\x12$.ansys.api.dpf.field.v0.CountRequest\x1a$.ansys.api.dpf.base.v0.CountResponse\x12\x45\n\x06\x44\x65lete\x12\x1d.ansys.api.dpf.field.v0.Field\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x19\xaa\x02\x16\x41nsys.Api.Dpf.Field.V0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x66ield.proto\x12\x16\x61nsys.api.dpf.field.v0\x1a\nbase.proto\x1a\rscoping.proto\x1a\x16\x66ield_definition.proto\x1a\rsupport.proto\x1a\x0f\x64\x61ta_tree.proto\x1a\x18\x63ollection_message.proto\"P\n\x14\x43ustomTypeDefinition\x12\x18\n\x10unitary_datatype\x18\x01 \x01(\t\x12\x1e\n\x16num_bytes_unitary_data\x18\x02 \x01(\x05\"\x95\x01\n\x05\x46ield\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\x12\x10\n\x08\x64\x61tatype\x18\x02 \x01(\t\x12\x45\n\x0f\x63ustom_type_def\x18\x03 \x01(\x0b\x32,.ansys.api.dpf.field.v0.CustomTypeDefinition\"\xc5\x02\n\x0c\x46ieldRequest\x12-\n\x06nature\x18\x01 \x01(\x0e\x32\x1d.ansys.api.dpf.base.v0.Nature\x12\x31\n\x08location\x18\x02 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.Location\x12/\n\x04size\x18\x03 \x01(\x0b\x32!.ansys.api.dpf.field.v0.FieldSize\x12\x10\n\x08\x64\x61tatype\x18\x04 \x01(\t\x12I\n\x0e\x64imensionality\x18\x05 \x01(\x0b\x32\x31.ansys.api.dpf.field_definition.v0.Dimensionality\x12\x45\n\x0f\x63ustom_type_def\x18\x06 \x01(\x0b\x32,.ansys.api.dpf.field.v0.CustomTypeDefinition\"\x8d\x01\n\x0e\x41\x64\x64\x44\x61taRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12M\n\x13\x65lemdata_containers\x18\x02 \x01(\x0b\x32\x30.ansys.api.dpf.field.v0.ElementaryDataContainers\"x\n\x14UpdateScopingRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x32\n\x07scoping\x18\x02 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\"\x93\x01\n\x1cUpdateFieldDefinitionRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x45\n\tfield_def\x18\x02 \x01(\x0b\x32\x32.ansys.api.dpf.field_definition.v0.FieldDefinition\"\x9a\x01\n\x1bUpdateElementaryDataRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12M\n\x13\x65lemdata_containers\x18\x02 
\x01(\x0b\x32\x30.ansys.api.dpf.field.v0.ElementaryDataContainers\"\x83\x01\n\x11UpdateSizeRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12/\n\x04size\x18\x02 \x01(\x0b\x32!.ansys.api.dpf.field.v0.FieldSize\x12\x0f\n\x07reserve\x18\x03 \x01(\x08\"Z\n\x1cUpdateDataPointerSizeRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x0c\n\x04size\x18\x02 \x01(\x05\"4\n\tFieldSize\x12\x14\n\x0cscoping_size\x18\x01 \x01(\x05\x12\x11\n\tdata_size\x18\x02 \x01(\x05\"\xae\x02\n\x04\x44\x61ta\x12\x39\n\ndatadouble\x18\x02 \x01(\x0b\x32#.ansys.api.dpf.base.v0.DoubleVectorH\x00\x12\x33\n\x07\x64\x61taint\x18\x03 \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVectorH\x00\x12\x37\n\tdatafloat\x18\x04 \x01(\x0b\x32\".ansys.api.dpf.base.v0.FloatVectorH\x00\x12\x39\n\ndatastring\x18\x01 \x01(\x0b\x32#.ansys.api.dpf.base.v0.StringVectorH\x00\x12\x35\n\x08\x64\x61tabyte\x18\x05 \x01(\x0b\x32!.ansys.api.dpf.base.v0.ByteVectorH\x00\x42\x0b\n\tdatatypes\"q\n\x18\x45lementaryDataContainers\x12\x12\n\nscoping_id\x18\x01 \x01(\x05\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.ansys.api.dpf.field.v0.Data\x12\x15\n\rscoping_index\x18\x03 \x01(\x05\";\n\x0bListRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\":\n\nGetRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\"s\n\x18GetElementaryDataRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x0f\n\x05index\x18\x02 \x01(\x05H\x00\x12\x0c\n\x02id\x18\x03 \x01(\x05H\x00\x42\n\n\x08index_id\"\x1d\n\x0cListResponse\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\"j\n\x19GetElementaryDataResponse\x12M\n\x13\x65lemdata_containers\x18\x01 \x01(\x0b\x32\x30.ansys.api.dpf.field.v0.ElementaryDataContainers\"H\n\x12GetScopingResponse\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\"x\n\x1aGetFieldDefinitionResponse\x12L\n\x10\x66ield_definition\x18\x01 
\x01(\x0b\x32\x32.ansys.api.dpf.field_definition.v0.FieldDefinition\x12\x0c\n\x04name\x18\x02 \x01(\t\"p\n\x0c\x43ountRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x32\n\x06\x65ntity\x18\x02 \x01(\x0e\x32\".ansys.api.dpf.base.v0.CountEntity\"i\n\x0eSupportRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"A\n\x11PropertiesRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\"M\n\x12PropertiesResponse\x12\x37\n\tdata_tree\x18\x01 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTree\"u\n\x11SetSupportRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\x32\n\x07support\x18\x02 \x01(\x0b\x32!.ansys.api.dpf.support.v0.Support\"P\n\x11UpdateDataRequest\x12,\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field\x12\r\n\x05\x61rray\x18\x02 \x01(\x0c\"\x8b\x03\n\x12SingleFieldAllData\x12\x45\n\x0f\x63ustom_type_def\x18\x01 \x01(\x0b\x32,.ansys.api.dpf.field.v0.CustomTypeDefinition\x12\x32\n\x07scoping\x18\x02 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12P\n\x10\x66ield_definition\x18\x03 \x01(\x0b\x32\x36.ansys.api.dpf.field_definition.v0.FieldDefinitionData\x12\x35\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12=\n\x0c\x64\x61ta_pointer\x18\x05 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x32\n\x07support\x18\x06 \x01(\x0b\x32!.ansys.api.dpf.support.v0.Support\"W\n\x07\x41llData\x12\x10\n\x08\x64\x61tatype\x18\x01 \x01(\t\x12:\n\x06\x66ields\x18\x02 \x03(\x0b\x32*.ansys.api.dpf.field.v0.SingleFieldAllData\"G\n\x16\x43reateWithDataResponse\x12-\n\x06\x66ields\x18\x01 
\x03(\x0b\x32\x1d.ansys.api.dpf.field.v0.Field2\xc4\x0f\n\x0c\x46ieldService\x12M\n\x06\x43reate\x12$.ansys.api.dpf.field.v0.FieldRequest\x1a\x1d.ansys.api.dpf.field.v0.Field\x12\x65\n\x0e\x43reateWithData\x12\x1f.ansys.api.dpf.field.v0.AllData\x1a..ansys.api.dpf.field.v0.CreateWithDataResponse(\x01\x30\x01\x12O\n\x07\x41\x64\x64\x44\x61ta\x12&.ansys.api.dpf.field.v0.AddDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12W\n\nUpdateData\x12).ansys.api.dpf.field.v0.UpdateDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12^\n\x11UpdateDataPointer\x12).ansys.api.dpf.field.v0.UpdateDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12[\n\rUpdateScoping\x12,.ansys.api.dpf.field.v0.UpdateScopingRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12U\n\nUpdateSize\x12).ansys.api.dpf.field.v0.UpdateSizeRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12k\n\x15UpdateDataPointerSize\x12\x34.ansys.api.dpf.field.v0.UpdateDataPointerSizeRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12k\n\x15UpdateFieldDefinition\x12\x34.ansys.api.dpf.field.v0.UpdateFieldDefinitionRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12i\n\x14UpdateElementaryData\x12\x33.ansys.api.dpf.field.v0.UpdateElementaryDataRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12S\n\x04List\x12#.ansys.api.dpf.field.v0.ListRequest\x1a$.ansys.api.dpf.field.v0.ListResponse0\x01\x12^\n\x0fListDataPointer\x12#.ansys.api.dpf.field.v0.ListRequest\x1a$.ansys.api.dpf.field.v0.ListResponse0\x01\x12\\\n\nGetScoping\x12\".ansys.api.dpf.field.v0.GetRequest\x1a*.ansys.api.dpf.field.v0.GetScopingResponse\x12W\n\nGetSupport\x12&.ansys.api.dpf.field.v0.SupportRequest\x1a!.ansys.api.dpf.support.v0.Support\x12\x66\n\rGetProperties\x12).ansys.api.dpf.field.v0.PropertiesRequest\x1a*.ansys.api.dpf.field.v0.PropertiesResponse\x12U\n\nSetSupport\x12).ansys.api.dpf.field.v0.SetSupportRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12l\n\x12GetFieldDefinition\x12\".ansys.api.dpf.field.v0.GetRequest\x1a\x32.ansys.api.dpf.field.v0.GetFieldDefinitionResponse\x1
2x\n\x11GetElementaryData\x12\x30.ansys.api.dpf.field.v0.GetElementaryDataRequest\x1a\x31.ansys.api.dpf.field.v0.GetElementaryDataResponse\x12S\n\x05\x43ount\x12$.ansys.api.dpf.field.v0.CountRequest\x1a$.ansys.api.dpf.base.v0.CountResponse\x12L\n\nGetAllData\x12\x1d.ansys.api.dpf.field.v0.Field\x1a\x1f.ansys.api.dpf.field.v0.AllData\x12\x45\n\x06\x44\x65lete\x12\x1d.ansys.api.dpf.field.v0.Field\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x19\xaa\x02\x16\x41nsys.Api.Dpf.Field.V0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -27,56 +28,62 @@
if _descriptor._USE_C_DESCRIPTORS == False:
_globals['DESCRIPTOR']._options = None
_globals['DESCRIPTOR']._serialized_options = b'\252\002\026Ansys.Api.Dpf.Field.V0'
- _globals['_CUSTOMTYPEDEFINITION']._serialized_start=122
- _globals['_CUSTOMTYPEDEFINITION']._serialized_end=202
- _globals['_FIELD']._serialized_start=205
- _globals['_FIELD']._serialized_end=354
- _globals['_FIELDREQUEST']._serialized_start=357
- _globals['_FIELDREQUEST']._serialized_end=682
- _globals['_ADDDATAREQUEST']._serialized_start=685
- _globals['_ADDDATAREQUEST']._serialized_end=826
- _globals['_UPDATESCOPINGREQUEST']._serialized_start=828
- _globals['_UPDATESCOPINGREQUEST']._serialized_end=948
- _globals['_UPDATEFIELDDEFINITIONREQUEST']._serialized_start=951
- _globals['_UPDATEFIELDDEFINITIONREQUEST']._serialized_end=1098
- _globals['_UPDATEELEMENTARYDATAREQUEST']._serialized_start=1101
- _globals['_UPDATEELEMENTARYDATAREQUEST']._serialized_end=1255
- _globals['_UPDATESIZEREQUEST']._serialized_start=1258
- _globals['_UPDATESIZEREQUEST']._serialized_end=1389
- _globals['_UPDATEDATAPOINTERSIZEREQUEST']._serialized_start=1391
- _globals['_UPDATEDATAPOINTERSIZEREQUEST']._serialized_end=1481
- _globals['_FIELDSIZE']._serialized_start=1483
- _globals['_FIELDSIZE']._serialized_end=1535
- _globals['_DATA']._serialized_start=1538
- _globals['_DATA']._serialized_end=1840
- _globals['_ELEMENTARYDATACONTAINERS']._serialized_start=1842
- _globals['_ELEMENTARYDATACONTAINERS']._serialized_end=1955
- _globals['_LISTREQUEST']._serialized_start=1957
- _globals['_LISTREQUEST']._serialized_end=2016
- _globals['_GETREQUEST']._serialized_start=2018
- _globals['_GETREQUEST']._serialized_end=2076
- _globals['_GETELEMENTARYDATAREQUEST']._serialized_start=2078
- _globals['_GETELEMENTARYDATAREQUEST']._serialized_end=2193
- _globals['_LISTRESPONSE']._serialized_start=2195
- _globals['_LISTRESPONSE']._serialized_end=2224
- _globals['_GETELEMENTARYDATARESPONSE']._serialized_start=2226
- _globals['_GETELEMENTARYDATARESPONSE']._serialized_end=2332
- _globals['_GETSCOPINGRESPONSE']._serialized_start=2334
- _globals['_GETSCOPINGRESPONSE']._serialized_end=2406
- _globals['_GETFIELDDEFINITIONRESPONSE']._serialized_start=2408
- _globals['_GETFIELDDEFINITIONRESPONSE']._serialized_end=2528
- _globals['_COUNTREQUEST']._serialized_start=2530
- _globals['_COUNTREQUEST']._serialized_end=2642
- _globals['_SUPPORTREQUEST']._serialized_start=2644
- _globals['_SUPPORTREQUEST']._serialized_end=2749
- _globals['_PROPERTIESREQUEST']._serialized_start=2751
- _globals['_PROPERTIESREQUEST']._serialized_end=2816
- _globals['_PROPERTIESRESPONSE']._serialized_start=2818
- _globals['_PROPERTIESRESPONSE']._serialized_end=2895
- _globals['_SETSUPPORTREQUEST']._serialized_start=2897
- _globals['_SETSUPPORTREQUEST']._serialized_end=3014
- _globals['_UPDATEDATAREQUEST']._serialized_start=3016
- _globals['_UPDATEDATAREQUEST']._serialized_end=3096
- _globals['_FIELDSERVICE']._serialized_start=3099
- _globals['_FIELDSERVICE']._serialized_end=4906
+ _globals['_CUSTOMTYPEDEFINITION']._serialized_start=148
+ _globals['_CUSTOMTYPEDEFINITION']._serialized_end=228
+ _globals['_FIELD']._serialized_start=231
+ _globals['_FIELD']._serialized_end=380
+ _globals['_FIELDREQUEST']._serialized_start=383
+ _globals['_FIELDREQUEST']._serialized_end=708
+ _globals['_ADDDATAREQUEST']._serialized_start=711
+ _globals['_ADDDATAREQUEST']._serialized_end=852
+ _globals['_UPDATESCOPINGREQUEST']._serialized_start=854
+ _globals['_UPDATESCOPINGREQUEST']._serialized_end=974
+ _globals['_UPDATEFIELDDEFINITIONREQUEST']._serialized_start=977
+ _globals['_UPDATEFIELDDEFINITIONREQUEST']._serialized_end=1124
+ _globals['_UPDATEELEMENTARYDATAREQUEST']._serialized_start=1127
+ _globals['_UPDATEELEMENTARYDATAREQUEST']._serialized_end=1281
+ _globals['_UPDATESIZEREQUEST']._serialized_start=1284
+ _globals['_UPDATESIZEREQUEST']._serialized_end=1415
+ _globals['_UPDATEDATAPOINTERSIZEREQUEST']._serialized_start=1417
+ _globals['_UPDATEDATAPOINTERSIZEREQUEST']._serialized_end=1507
+ _globals['_FIELDSIZE']._serialized_start=1509
+ _globals['_FIELDSIZE']._serialized_end=1561
+ _globals['_DATA']._serialized_start=1564
+ _globals['_DATA']._serialized_end=1866
+ _globals['_ELEMENTARYDATACONTAINERS']._serialized_start=1868
+ _globals['_ELEMENTARYDATACONTAINERS']._serialized_end=1981
+ _globals['_LISTREQUEST']._serialized_start=1983
+ _globals['_LISTREQUEST']._serialized_end=2042
+ _globals['_GETREQUEST']._serialized_start=2044
+ _globals['_GETREQUEST']._serialized_end=2102
+ _globals['_GETELEMENTARYDATAREQUEST']._serialized_start=2104
+ _globals['_GETELEMENTARYDATAREQUEST']._serialized_end=2219
+ _globals['_LISTRESPONSE']._serialized_start=2221
+ _globals['_LISTRESPONSE']._serialized_end=2250
+ _globals['_GETELEMENTARYDATARESPONSE']._serialized_start=2252
+ _globals['_GETELEMENTARYDATARESPONSE']._serialized_end=2358
+ _globals['_GETSCOPINGRESPONSE']._serialized_start=2360
+ _globals['_GETSCOPINGRESPONSE']._serialized_end=2432
+ _globals['_GETFIELDDEFINITIONRESPONSE']._serialized_start=2434
+ _globals['_GETFIELDDEFINITIONRESPONSE']._serialized_end=2554
+ _globals['_COUNTREQUEST']._serialized_start=2556
+ _globals['_COUNTREQUEST']._serialized_end=2668
+ _globals['_SUPPORTREQUEST']._serialized_start=2670
+ _globals['_SUPPORTREQUEST']._serialized_end=2775
+ _globals['_PROPERTIESREQUEST']._serialized_start=2777
+ _globals['_PROPERTIESREQUEST']._serialized_end=2842
+ _globals['_PROPERTIESRESPONSE']._serialized_start=2844
+ _globals['_PROPERTIESRESPONSE']._serialized_end=2921
+ _globals['_SETSUPPORTREQUEST']._serialized_start=2923
+ _globals['_SETSUPPORTREQUEST']._serialized_end=3040
+ _globals['_UPDATEDATAREQUEST']._serialized_start=3042
+ _globals['_UPDATEDATAREQUEST']._serialized_end=3122
+ _globals['_SINGLEFIELDALLDATA']._serialized_start=3125
+ _globals['_SINGLEFIELDALLDATA']._serialized_end=3520
+ _globals['_ALLDATA']._serialized_start=3522
+ _globals['_ALLDATA']._serialized_end=3609
+ _globals['_CREATEWITHDATARESPONSE']._serialized_start=3611
+ _globals['_CREATEWITHDATARESPONSE']._serialized_end=3682
+ _globals['_FIELDSERVICE']._serialized_start=3685
+ _globals['_FIELDSERVICE']._serialized_end=5673
# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/field_pb2_grpc.py b/src/ansys/grpc/dpf/field_pb2_grpc.py
index 74d808542f..7416cfbe89 100644
--- a/src/ansys/grpc/dpf/field_pb2_grpc.py
+++ b/src/ansys/grpc/dpf/field_pb2_grpc.py
@@ -21,6 +21,11 @@ def __init__(self, channel):
request_serializer=field__pb2.FieldRequest.SerializeToString,
response_deserializer=field__pb2.Field.FromString,
)
+ self.CreateWithData = channel.stream_stream(
+ '/ansys.api.dpf.field.v0.FieldService/CreateWithData',
+ request_serializer=field__pb2.AllData.SerializeToString,
+ response_deserializer=field__pb2.CreateWithDataResponse.FromString,
+ )
self.AddData = channel.unary_unary(
'/ansys.api.dpf.field.v0.FieldService/AddData',
request_serializer=field__pb2.AddDataRequest.SerializeToString,
@@ -106,6 +111,11 @@ def __init__(self, channel):
request_serializer=field__pb2.CountRequest.SerializeToString,
response_deserializer=base__pb2.CountResponse.FromString,
)
+ self.GetAllData = channel.unary_unary(
+ '/ansys.api.dpf.field.v0.FieldService/GetAllData',
+ request_serializer=field__pb2.Field.SerializeToString,
+ response_deserializer=field__pb2.AllData.FromString,
+ )
self.Delete = channel.unary_unary(
'/ansys.api.dpf.field.v0.FieldService/Delete',
request_serializer=field__pb2.Field.SerializeToString,
@@ -122,6 +132,12 @@ def Create(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
+ def CreateWithData(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
def AddData(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -232,6 +248,12 @@ def Count(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
+ def GetAllData(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
def Delete(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -246,6 +268,11 @@ def add_FieldServiceServicer_to_server(servicer, server):
request_deserializer=field__pb2.FieldRequest.FromString,
response_serializer=field__pb2.Field.SerializeToString,
),
+ 'CreateWithData': grpc.stream_stream_rpc_method_handler(
+ servicer.CreateWithData,
+ request_deserializer=field__pb2.AllData.FromString,
+ response_serializer=field__pb2.CreateWithDataResponse.SerializeToString,
+ ),
'AddData': grpc.unary_unary_rpc_method_handler(
servicer.AddData,
request_deserializer=field__pb2.AddDataRequest.FromString,
@@ -331,6 +358,11 @@ def add_FieldServiceServicer_to_server(servicer, server):
request_deserializer=field__pb2.CountRequest.FromString,
response_serializer=base__pb2.CountResponse.SerializeToString,
),
+ 'GetAllData': grpc.unary_unary_rpc_method_handler(
+ servicer.GetAllData,
+ request_deserializer=field__pb2.Field.FromString,
+ response_serializer=field__pb2.AllData.SerializeToString,
+ ),
'Delete': grpc.unary_unary_rpc_method_handler(
servicer.Delete,
request_deserializer=field__pb2.Field.FromString,
@@ -363,6 +395,23 @@ def Create(request,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+ @staticmethod
+ def CreateWithData(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/ansys.api.dpf.field.v0.FieldService/CreateWithData',
+ field__pb2.AllData.SerializeToString,
+ field__pb2.CreateWithDataResponse.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
@staticmethod
def AddData(request,
target,
@@ -652,6 +701,23 @@ def Count(request,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+ @staticmethod
+ def GetAllData(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/ansys.api.dpf.field.v0.FieldService/GetAllData',
+ field__pb2.Field.SerializeToString,
+ field__pb2.AllData.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
@staticmethod
def Delete(request,
target,
diff --git a/src/ansys/grpc/dpf/generic_data_container_pb2.py b/src/ansys/grpc/dpf/generic_data_container_pb2.py
index 2909e7c47c..d5e68b9c44 100644
--- a/src/ansys/grpc/dpf/generic_data_container_pb2.py
+++ b/src/ansys/grpc/dpf/generic_data_container_pb2.py
@@ -16,7 +16,7 @@
import ansys.grpc.dpf.dpf_any_message_pb2 as dpf__any__message__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgeneric_data_container.proto\x12\'ansys.api.dpf.generic_data_container.v0\x1a\nbase.proto\x1a\x15\x64pf_any_message.proto\"K\n\x14GenericDataContainer\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"\x0f\n\rCreateRequest\"w\n\x12GetPropertyRequest\x12J\n\x03gdc\x18\x01 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\x12\x15\n\rproperty_name\x18\x02 \x03(\t\"L\n\x13GetPropertyResponse\x12\x35\n\x03\x61ny\x18\x01 \x03(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny\"\xae\x01\n\x12SetPropertyRequest\x12J\n\x03gdc\x18\x01 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\x12\x15\n\rproperty_name\x18\x02 \x03(\t\x12\x35\n\x03\x61ny\x18\x03 \x03(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny\"\x15\n\x13SetPropertyResponse\"e\n\x17GetPropertyTypesRequest\x12J\n\x03gdc\x18\x01 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\"2\n\x18GetPropertyTypesResponse\x12\x16\n\x0eproperty_types\x18\x01 \x03(\t\"e\n\x17GetPropertyNamesRequest\x12J\n\x03gdc\x18\x01 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\"2\n\x18GetPropertyNamesResponse\x12\x16\n\x0eproperty_names\x18\x01 
\x03(\t2\xcf\x06\n\x1bGenericDataContainerService\x12\x7f\n\x06\x43reate\x12\x36.ansys.api.dpf.generic_data_container.v0.CreateRequest\x1a=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\x12\x88\x01\n\x0bSetProperty\x12;.ansys.api.dpf.generic_data_container.v0.SetPropertyRequest\x1a<.ansys.api.dpf.generic_data_container.v0.SetPropertyResponse\x12\x88\x01\n\x0bGetProperty\x12;.ansys.api.dpf.generic_data_container.v0.GetPropertyRequest\x1a<.ansys.api.dpf.generic_data_container.v0.GetPropertyResponse\x12\x97\x01\n\x10GetPropertyTypes\x12@.ansys.api.dpf.generic_data_container.v0.GetPropertyTypesRequest\x1a\x41.ansys.api.dpf.generic_data_container.v0.GetPropertyTypesResponse\x12\x97\x01\n\x10GetPropertyNames\x12@.ansys.api.dpf.generic_data_container.v0.GetPropertyNamesRequest\x1a\x41.ansys.api.dpf.generic_data_container.v0.GetPropertyNamesResponse\x12\x65\n\x06\x44\x65lete\x12=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\x1a\x1c.ansys.api.dpf.base.v0.EmptyB(\xaa\x02%Ansys.Api.Dpf.GenericDataContainer.V0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgeneric_data_container.proto\x12\'ansys.api.dpf.generic_data_container.v0\x1a\nbase.proto\x1a\x15\x64pf_any_message.proto\"K\n\x14GenericDataContainer\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"\x0f\n\rCreateRequest\"w\n\x12GetPropertyRequest\x12J\n\x03gdc\x18\x01 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\x12\x15\n\rproperty_name\x18\x02 \x03(\t\"L\n\x13GetPropertyResponse\x12\x35\n\x03\x61ny\x18\x01 \x03(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny\"\xae\x01\n\x12SetPropertyRequest\x12J\n\x03gdc\x18\x01 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\x12\x15\n\rproperty_name\x18\x02 \x03(\t\x12\x35\n\x03\x61ny\x18\x03 \x03(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAny\"\x15\n\x13SetPropertyResponse\"e\n\x17GetPropertyTypesRequest\x12J\n\x03gdc\x18\x01 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\"2\n\x18GetPropertyTypesResponse\x12\x16\n\x0eproperty_types\x18\x01 \x03(\t\"e\n\x17GetPropertyNamesRequest\x12J\n\x03gdc\x18\x01 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\"2\n\x18GetPropertyNamesResponse\x12\x16\n\x0eproperty_names\x18\x01 \x03(\t\"\xab\x01\n\x08Property\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x03str\x18\x02 \x01(\tH\x00\x12\r\n\x03int\x18\x03 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x04 \x01(\x01H\x00\x12\x37\n\x03\x61ny\x18\x07 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12\x17\n\rsub_gdc_index\x18\x0b \x01(\x04H\x00\x42\x0f\n\rproperty_data\"j\n!SingleGenericDataContainerAllData\x12\x45\n\nproperties\x18\x01 \x03(\x0b\x32\x31.ansys.api.dpf.generic_data_container.v0.Property\"x\n\x07\x41llData\x12X\n\x04gdcs\x18\x01 \x03(\x0b\x32J.ansys.api.dpf.generic_data_container.v0.SingleGenericDataContainerAllData\x12\x13\n\x0bgdc_indices\x18\x02 
\x03(\x04\"e\n\x16\x43reateWithDataResponse\x12K\n\x04gdcs\x18\x01 \x03(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer2\xd9\x07\n\x1bGenericDataContainerService\x12\x7f\n\x06\x43reate\x12\x36.ansys.api.dpf.generic_data_container.v0.CreateRequest\x1a=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\x12\x87\x01\n\x0e\x43reateWithData\x12\x30.ansys.api.dpf.generic_data_container.v0.AllData\x1a?.ansys.api.dpf.generic_data_container.v0.CreateWithDataResponse(\x01\x30\x01\x12\x88\x01\n\x0bSetProperty\x12;.ansys.api.dpf.generic_data_container.v0.SetPropertyRequest\x1a<.ansys.api.dpf.generic_data_container.v0.SetPropertyResponse\x12\x88\x01\n\x0bGetProperty\x12;.ansys.api.dpf.generic_data_container.v0.GetPropertyRequest\x1a<.ansys.api.dpf.generic_data_container.v0.GetPropertyResponse\x12\x97\x01\n\x10GetPropertyTypes\x12@.ansys.api.dpf.generic_data_container.v0.GetPropertyTypesRequest\x1a\x41.ansys.api.dpf.generic_data_container.v0.GetPropertyTypesResponse\x12\x97\x01\n\x10GetPropertyNames\x12@.ansys.api.dpf.generic_data_container.v0.GetPropertyNamesRequest\x1a\x41.ansys.api.dpf.generic_data_container.v0.GetPropertyNamesResponse\x12\x65\n\x06\x44\x65lete\x12=.ansys.api.dpf.generic_data_container.v0.GenericDataContainer\x1a\x1c.ansys.api.dpf.base.v0.EmptyB(\xaa\x02%Ansys.Api.Dpf.GenericDataContainer.V0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -44,6 +44,14 @@
_globals['_GETPROPERTYNAMESREQUEST']._serialized_end=857
_globals['_GETPROPERTYNAMESRESPONSE']._serialized_start=859
_globals['_GETPROPERTYNAMESRESPONSE']._serialized_end=909
- _globals['_GENERICDATACONTAINERSERVICE']._serialized_start=912
- _globals['_GENERICDATACONTAINERSERVICE']._serialized_end=1759
+ _globals['_PROPERTY']._serialized_start=912
+ _globals['_PROPERTY']._serialized_end=1083
+ _globals['_SINGLEGENERICDATACONTAINERALLDATA']._serialized_start=1085
+ _globals['_SINGLEGENERICDATACONTAINERALLDATA']._serialized_end=1191
+ _globals['_ALLDATA']._serialized_start=1193
+ _globals['_ALLDATA']._serialized_end=1313
+ _globals['_CREATEWITHDATARESPONSE']._serialized_start=1315
+ _globals['_CREATEWITHDATARESPONSE']._serialized_end=1416
+ _globals['_GENERICDATACONTAINERSERVICE']._serialized_start=1419
+ _globals['_GENERICDATACONTAINERSERVICE']._serialized_end=2404
# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/generic_data_container_pb2_grpc.py b/src/ansys/grpc/dpf/generic_data_container_pb2_grpc.py
index 9bbd812d99..2b14b070d4 100644
--- a/src/ansys/grpc/dpf/generic_data_container_pb2_grpc.py
+++ b/src/ansys/grpc/dpf/generic_data_container_pb2_grpc.py
@@ -20,6 +20,11 @@ def __init__(self, channel):
request_serializer=generic__data__container__pb2.CreateRequest.SerializeToString,
response_deserializer=generic__data__container__pb2.GenericDataContainer.FromString,
)
+ self.CreateWithData = channel.stream_stream(
+ '/ansys.api.dpf.generic_data_container.v0.GenericDataContainerService/CreateWithData',
+ request_serializer=generic__data__container__pb2.AllData.SerializeToString,
+ response_deserializer=generic__data__container__pb2.CreateWithDataResponse.FromString,
+ )
self.SetProperty = channel.unary_unary(
'/ansys.api.dpf.generic_data_container.v0.GenericDataContainerService/SetProperty',
request_serializer=generic__data__container__pb2.SetPropertyRequest.SerializeToString,
@@ -56,6 +61,12 @@ def Create(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
+ def CreateWithData(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
def SetProperty(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -94,6 +105,11 @@ def add_GenericDataContainerServiceServicer_to_server(servicer, server):
request_deserializer=generic__data__container__pb2.CreateRequest.FromString,
response_serializer=generic__data__container__pb2.GenericDataContainer.SerializeToString,
),
+ 'CreateWithData': grpc.stream_stream_rpc_method_handler(
+ servicer.CreateWithData,
+ request_deserializer=generic__data__container__pb2.AllData.FromString,
+ response_serializer=generic__data__container__pb2.CreateWithDataResponse.SerializeToString,
+ ),
'SetProperty': grpc.unary_unary_rpc_method_handler(
servicer.SetProperty,
request_deserializer=generic__data__container__pb2.SetPropertyRequest.FromString,
@@ -146,6 +162,23 @@ def Create(request,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+ @staticmethod
+ def CreateWithData(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/ansys.api.dpf.generic_data_container.v0.GenericDataContainerService/CreateWithData',
+ generic__data__container__pb2.AllData.SerializeToString,
+ generic__data__container__pb2.CreateWithDataResponse.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
@staticmethod
def SetProperty(request,
target,
diff --git a/src/ansys/grpc/dpf/operator_pb2.py b/src/ansys/grpc/dpf/operator_pb2.py
index 408f4ff0b0..7ead9af4b0 100644
--- a/src/ansys/grpc/dpf/operator_pb2.py
+++ b/src/ansys/grpc/dpf/operator_pb2.py
@@ -12,7 +12,7 @@
_sym_db = _symbol_database.Default()
-import ansys.grpc.dpf.collection_pb2 as collection__pb2
+import ansys.grpc.dpf.collection_message_pb2 as collection__message__pb2
import ansys.grpc.dpf.field_pb2 as field__pb2
import ansys.grpc.dpf.scoping_pb2 as scoping__pb2
import ansys.grpc.dpf.base_pb2 as base__pb2
@@ -29,7 +29,7 @@
import ansys.grpc.dpf.generic_data_container_pb2 as generic__data__container__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0eoperator.proto\x12\x1d\x61nsys.api.dpf.dpf_operator.v0\x1a\x10\x63ollection.proto\x1a\x0b\x66ield.proto\x1a\rscoping.proto\x1a\nbase.proto\x1a\x12\x64\x61ta_sources.proto\x1a\x13meshed_region.proto\x1a\x17time_freq_support.proto\x1a\x11result_info.proto\x1a\x15operator_config.proto\x1a\x14\x63yclic_support.proto\x1a\x16workflow_message.proto\x1a\x15\x64pf_any_message.proto\x1a\x0f\x64\x61ta_tree.proto\x1a\x11label_space.proto\x1a\x1cgeneric_data_container.proto\"M\n\x08Operator\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x88\x05\n\rSpecification\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12]\n\x12map_input_pin_spec\x18\x02 \x03(\x0b\x32\x41.ansys.api.dpf.dpf_operator.v0.Specification.MapInputPinSpecEntry\x12_\n\x13map_output_pin_spec\x18\x03 \x03(\x0b\x32\x42.ansys.api.dpf.dpf_operator.v0.Specification.MapOutputPinSpecEntry\x12J\n\x0b\x63onfig_spec\x18\x04 \x01(\x0b\x32\x35.ansys.api.dpf.operator_config.v0.ConfigSpecification\x12P\n\nproperties\x18\x05 \x03(\x0b\x32<.ansys.api.dpf.dpf_operator.v0.Specification.PropertiesEntry\x1ag\n\x14MapInputPinSpecEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12>\n\x05value\x18\x02 \x01(\x0b\x32/.ansys.api.dpf.dpf_operator.v0.PinSpecification:\x02\x38\x01\x1ah\n\x15MapOutputPinSpecEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12>\n\x05value\x18\x02 \x01(\x0b\x32/.ansys.api.dpf.dpf_operator.v0.PinSpecification:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x86\x01\n\x10PinSpecification\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ntype_names\x18\x02 \x03(\t\x12\x10\n\x08optional\x18\x03 \x01(\x08\x12\x10\n\x08\x64ocument\x18\x04 \x01(\t\x12\x10\n\x08\x65llipsis\x18\x05 \x01(\x08\x12\x1a\n\x12name_derived_class\x18\x06 \x01(\t\"Y\n\rOperatorInput\x12\x38\n\x07inputop\x18\x01 
\x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x0e\n\x06pinOut\x18\x03 \x01(\x05\"\xa5\t\n\rUpdateRequest\x12\x33\n\x02op\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x0b\n\x03pin\x18\x02 \x01(\x05\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\r\n\x03int\x18\x04 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x05 \x01(\x01H\x00\x12\x0e\n\x04\x62ool\x18\x06 \x01(\x08H\x00\x12.\n\x05\x66ield\x18\x07 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x08 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\t \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\n \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12<\n\x04mesh\x18\x0b \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12\x30\n\x04vint\x18\x0c \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVectorH\x00\x12\x36\n\x07vdouble\x18\r \x01(\x0b\x32#.ansys.api.dpf.base.v0.DoubleVectorH\x00\x12\x45\n\x0b\x63yc_support\x18\x0e \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12P\n\x11time_freq_support\x18\x0f \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12?\n\x08workflow\x18\x10 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12\x39\n\tdata_tree\x18\x12 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12:\n\x06\x61s_any\x18\x13 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12?\n\x0blabel_space\x18\x14 \x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceH\x00\x12\x44\n\x11operator_as_input\x18\x15 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12_\n\x16generic_data_container\x18\x16 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x12?\n\x07inputop\x18\x11 \x01(\x0b\x32,.ansys.api.dpf.dpf_operator.v0.OperatorInputH\x00\x42\x07\n\x05input\"\xae\x01\n\x12\x41rrayUpdateRequest\x12\x33\n\x02op\x18\x01 
\x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x0b\n\x03pin\x18\x02 \x01(\x05\x12+\n\x05\x61rray\x18\x03 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\x12)\n\x04type\x18\x04 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"\x8c\x01\n\x13UpdateConfigRequest\x12\x33\n\x02op\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12@\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x30.ansys.api.dpf.operator_config.v0.OperatorConfig\"\xb6\x01\n\x19OperatorEvaluationRequest\x12\x33\n\x02op\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x0b\n\x03pin\x18\x02 \x01(\x05\x12)\n\x04type\x18\x03 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12,\n\x07subtype\x18\x04 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"g\n\x15\x43reateOperatorRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x30.ansys.api.dpf.operator_config.v0.OperatorConfig\"\xb0\x07\n\x10OperatorResponse\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\r\n\x03int\x18\x04 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x05 \x01(\x01H\x00\x12\x0e\n\x04\x62ool\x18\x06 \x01(\x08H\x00\x12.\n\x05\x66ield\x18\x07 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x08 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\t \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12<\n\x04mesh\x18\n \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12?\n\x0bresult_info\x18\x0b \x01(\x0b\x32(.ansys.api.dpf.result_info.v0.ResultInfoH\x00\x12P\n\x11time_freq_support\x18\x0c \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\r \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12\x45\n\x0b\x63yc_support\x18\x0e \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12?\n\x08workflow\x18\x0f \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12\x37\n\x03\x61ny\x18\x10 
\x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12;\n\x08operator\x18\x11 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12\x39\n\tdata_tree\x18\x12 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12_\n\x16generic_data_container\x18\x13 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x42\x08\n\x06output\"D\n\x15\x41rrayOperatorResponse\x12+\n\x05\x61rray\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"\x9d\x01\n\x0cListResponse\x12\x0f\n\x07op_name\x18\x01 \x01(\t\x12@\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x30.ansys.api.dpf.operator_config.v0.OperatorConfig\x12:\n\x04spec\x18\x03 \x01(\x0b\x32,.ansys.api.dpf.dpf_operator.v0.Specification\"G\n\x10GetStatusRequest\x12\x33\n\x02op\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\"#\n\x11GetStatusResponse\x12\x0e\n\x06status\x18\x01 \x01(\x05\"\x19\n\x17ListAllOperatorsRequest\")\n\x18ListAllOperatorsResponse\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\x32\xb1\x08\n\x0fOperatorService\x12g\n\x06\x43reate\x12\x34.ansys.api.dpf.dpf_operator.v0.CreateOperatorRequest\x1a\'.ansys.api.dpf.dpf_operator.v0.Operator\x12T\n\x06Update\x12,.ansys.api.dpf.dpf_operator.v0.UpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12`\n\x0cUpdateConfig\x12\x32.ansys.api.dpf.dpf_operator.v0.UpdateConfigRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12p\n\x03Get\x12\x38.ansys.api.dpf.dpf_operator.v0.OperatorEvaluationRequest\x1a/.ansys.api.dpf.dpf_operator.v0.OperatorResponse\x12\\\n\x04List\x12\'.ansys.api.dpf.dpf_operator.v0.Operator\x1a+.ansys.api.dpf.dpf_operator.v0.ListResponse\x12n\n\tGetStatus\x12/.ansys.api.dpf.dpf_operator.v0.GetStatusRequest\x1a\x30.ansys.api.dpf.dpf_operator.v0.GetStatusResponse\x12O\n\x06\x44\x65lete\x12\'.ansys.api.dpf.dpf_operator.v0.Operator\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12\x85\x01\n\x10ListAllOperators\x12\x36.ansys.api.dpf.dpf_operator.v0.ListAllOperatorsRequest\x1a\x37.ansys.api.dpf.dpf_operator.v0.ListAllOperatorsResponse0
\x01\x12\x63\n\x0eUpdateStreamed\x12\x31.ansys.api.dpf.dpf_operator.v0.ArrayUpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12\x7f\n\x0bGetStreamed\x12\x38.ansys.api.dpf.dpf_operator.v0.OperatorEvaluationRequest\x1a\x34.ansys.api.dpf.dpf_operator.v0.ArrayOperatorResponse0\x01\x42\x1c\xaa\x02\x19\x41nsys.Api.Dpf.Operator.V0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0eoperator.proto\x12\x1d\x61nsys.api.dpf.dpf_operator.v0\x1a\x18\x63ollection_message.proto\x1a\x0b\x66ield.proto\x1a\rscoping.proto\x1a\nbase.proto\x1a\x12\x64\x61ta_sources.proto\x1a\x13meshed_region.proto\x1a\x17time_freq_support.proto\x1a\x11result_info.proto\x1a\x15operator_config.proto\x1a\x14\x63yclic_support.proto\x1a\x16workflow_message.proto\x1a\x15\x64pf_any_message.proto\x1a\x0f\x64\x61ta_tree.proto\x1a\x11label_space.proto\x1a\x1cgeneric_data_container.proto\"M\n\x08Operator\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x88\x05\n\rSpecification\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12]\n\x12map_input_pin_spec\x18\x02 \x03(\x0b\x32\x41.ansys.api.dpf.dpf_operator.v0.Specification.MapInputPinSpecEntry\x12_\n\x13map_output_pin_spec\x18\x03 \x03(\x0b\x32\x42.ansys.api.dpf.dpf_operator.v0.Specification.MapOutputPinSpecEntry\x12J\n\x0b\x63onfig_spec\x18\x04 \x01(\x0b\x32\x35.ansys.api.dpf.operator_config.v0.ConfigSpecification\x12P\n\nproperties\x18\x05 \x03(\x0b\x32<.ansys.api.dpf.dpf_operator.v0.Specification.PropertiesEntry\x1ag\n\x14MapInputPinSpecEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12>\n\x05value\x18\x02 \x01(\x0b\x32/.ansys.api.dpf.dpf_operator.v0.PinSpecification:\x02\x38\x01\x1ah\n\x15MapOutputPinSpecEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12>\n\x05value\x18\x02 \x01(\x0b\x32/.ansys.api.dpf.dpf_operator.v0.PinSpecification:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x86\x01\n\x10PinSpecification\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ntype_names\x18\x02 \x03(\t\x12\x10\n\x08optional\x18\x03 \x01(\x08\x12\x10\n\x08\x64ocument\x18\x04 \x01(\t\x12\x10\n\x08\x65llipsis\x18\x05 \x01(\x08\x12\x1a\n\x12name_derived_class\x18\x06 \x01(\t\"Y\n\rOperatorInput\x12\x38\n\x07inputop\x18\x01 
\x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x0e\n\x06pinOut\x18\x03 \x01(\x05\"\xa5\t\n\rUpdateRequest\x12\x33\n\x02op\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x0b\n\x03pin\x18\x02 \x01(\x05\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\r\n\x03int\x18\x04 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x05 \x01(\x01H\x00\x12\x0e\n\x04\x62ool\x18\x06 \x01(\x08H\x00\x12.\n\x05\x66ield\x18\x07 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x08 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\t \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\n \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12<\n\x04mesh\x18\x0b \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12\x30\n\x04vint\x18\x0c \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVectorH\x00\x12\x36\n\x07vdouble\x18\r \x01(\x0b\x32#.ansys.api.dpf.base.v0.DoubleVectorH\x00\x12\x45\n\x0b\x63yc_support\x18\x0e \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12P\n\x11time_freq_support\x18\x0f \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12?\n\x08workflow\x18\x10 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12\x39\n\tdata_tree\x18\x12 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12:\n\x06\x61s_any\x18\x13 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12?\n\x0blabel_space\x18\x14 \x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceH\x00\x12\x44\n\x11operator_as_input\x18\x15 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12_\n\x16generic_data_container\x18\x16 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x12?\n\x07inputop\x18\x11 \x01(\x0b\x32,.ansys.api.dpf.dpf_operator.v0.OperatorInputH\x00\x42\x07\n\x05input\"\xae\x01\n\x12\x41rrayUpdateRequest\x12\x33\n\x02op\x18\x01 
\x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x0b\n\x03pin\x18\x02 \x01(\x05\x12+\n\x05\x61rray\x18\x03 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\x12)\n\x04type\x18\x04 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"\x8c\x01\n\x13UpdateConfigRequest\x12\x33\n\x02op\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12@\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x30.ansys.api.dpf.operator_config.v0.OperatorConfig\"\xb6\x01\n\x19OperatorEvaluationRequest\x12\x33\n\x02op\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x0b\n\x03pin\x18\x02 \x01(\x05\x12)\n\x04type\x18\x03 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12,\n\x07subtype\x18\x04 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"g\n\x15\x43reateOperatorRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x30.ansys.api.dpf.operator_config.v0.OperatorConfig\"\xb0\x07\n\x10OperatorResponse\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\r\n\x03int\x18\x04 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x05 \x01(\x01H\x00\x12\x0e\n\x04\x62ool\x18\x06 \x01(\x08H\x00\x12.\n\x05\x66ield\x18\x07 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x08 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\t \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12<\n\x04mesh\x18\n \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12?\n\x0bresult_info\x18\x0b \x01(\x0b\x32(.ansys.api.dpf.result_info.v0.ResultInfoH\x00\x12P\n\x11time_freq_support\x18\x0c \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\r \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12\x45\n\x0b\x63yc_support\x18\x0e \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12?\n\x08workflow\x18\x0f \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12\x37\n\x03\x61ny\x18\x10 
\x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12;\n\x08operator\x18\x11 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12\x39\n\tdata_tree\x18\x12 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12_\n\x16generic_data_container\x18\x13 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x42\x08\n\x06output\"D\n\x15\x41rrayOperatorResponse\x12+\n\x05\x61rray\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"\x9d\x01\n\x0cListResponse\x12\x0f\n\x07op_name\x18\x01 \x01(\t\x12@\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x30.ansys.api.dpf.operator_config.v0.OperatorConfig\x12:\n\x04spec\x18\x03 \x01(\x0b\x32,.ansys.api.dpf.dpf_operator.v0.Specification\"G\n\x10GetStatusRequest\x12\x33\n\x02op\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\"#\n\x11GetStatusResponse\x12\x0e\n\x06status\x18\x01 \x01(\x05\"\x19\n\x17ListAllOperatorsRequest\")\n\x18ListAllOperatorsResponse\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\x32\xb1\x08\n\x0fOperatorService\x12g\n\x06\x43reate\x12\x34.ansys.api.dpf.dpf_operator.v0.CreateOperatorRequest\x1a\'.ansys.api.dpf.dpf_operator.v0.Operator\x12T\n\x06Update\x12,.ansys.api.dpf.dpf_operator.v0.UpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12`\n\x0cUpdateConfig\x12\x32.ansys.api.dpf.dpf_operator.v0.UpdateConfigRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12p\n\x03Get\x12\x38.ansys.api.dpf.dpf_operator.v0.OperatorEvaluationRequest\x1a/.ansys.api.dpf.dpf_operator.v0.OperatorResponse\x12\\\n\x04List\x12\'.ansys.api.dpf.dpf_operator.v0.Operator\x1a+.ansys.api.dpf.dpf_operator.v0.ListResponse\x12n\n\tGetStatus\x12/.ansys.api.dpf.dpf_operator.v0.GetStatusRequest\x1a\x30.ansys.api.dpf.dpf_operator.v0.GetStatusResponse\x12O\n\x06\x44\x65lete\x12\'.ansys.api.dpf.dpf_operator.v0.Operator\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12\x85\x01\n\x10ListAllOperators\x12\x36.ansys.api.dpf.dpf_operator.v0.ListAllOperatorsRequest\x1a\x37.ansys.api.dpf.dpf_operator.v0.ListAllOperatorsResponse0
\x01\x12\x63\n\x0eUpdateStreamed\x12\x31.ansys.api.dpf.dpf_operator.v0.ArrayUpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12\x7f\n\x0bGetStreamed\x12\x38.ansys.api.dpf.dpf_operator.v0.OperatorEvaluationRequest\x1a\x34.ansys.api.dpf.dpf_operator.v0.ArrayOperatorResponse0\x01\x42\x1c\xaa\x02\x19\x41nsys.Api.Dpf.Operator.V0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -43,44 +43,44 @@
_globals['_SPECIFICATION_MAPOUTPUTPINSPECENTRY']._serialized_options = b'8\001'
_globals['_SPECIFICATION_PROPERTIESENTRY']._options = None
_globals['_SPECIFICATION_PROPERTIESENTRY']._serialized_options = b'8\001'
- _globals['_OPERATOR']._serialized_start=350
- _globals['_OPERATOR']._serialized_end=427
- _globals['_SPECIFICATION']._serialized_start=430
- _globals['_SPECIFICATION']._serialized_end=1078
- _globals['_SPECIFICATION_MAPINPUTPINSPECENTRY']._serialized_start=818
- _globals['_SPECIFICATION_MAPINPUTPINSPECENTRY']._serialized_end=921
- _globals['_SPECIFICATION_MAPOUTPUTPINSPECENTRY']._serialized_start=923
- _globals['_SPECIFICATION_MAPOUTPUTPINSPECENTRY']._serialized_end=1027
- _globals['_SPECIFICATION_PROPERTIESENTRY']._serialized_start=1029
- _globals['_SPECIFICATION_PROPERTIESENTRY']._serialized_end=1078
- _globals['_PINSPECIFICATION']._serialized_start=1081
- _globals['_PINSPECIFICATION']._serialized_end=1215
- _globals['_OPERATORINPUT']._serialized_start=1217
- _globals['_OPERATORINPUT']._serialized_end=1306
- _globals['_UPDATEREQUEST']._serialized_start=1309
- _globals['_UPDATEREQUEST']._serialized_end=2498
- _globals['_ARRAYUPDATEREQUEST']._serialized_start=2501
- _globals['_ARRAYUPDATEREQUEST']._serialized_end=2675
- _globals['_UPDATECONFIGREQUEST']._serialized_start=2678
- _globals['_UPDATECONFIGREQUEST']._serialized_end=2818
- _globals['_OPERATOREVALUATIONREQUEST']._serialized_start=2821
- _globals['_OPERATOREVALUATIONREQUEST']._serialized_end=3003
- _globals['_CREATEOPERATORREQUEST']._serialized_start=3005
- _globals['_CREATEOPERATORREQUEST']._serialized_end=3108
- _globals['_OPERATORRESPONSE']._serialized_start=3111
- _globals['_OPERATORRESPONSE']._serialized_end=4055
- _globals['_ARRAYOPERATORRESPONSE']._serialized_start=4057
- _globals['_ARRAYOPERATORRESPONSE']._serialized_end=4125
- _globals['_LISTRESPONSE']._serialized_start=4128
- _globals['_LISTRESPONSE']._serialized_end=4285
- _globals['_GETSTATUSREQUEST']._serialized_start=4287
- _globals['_GETSTATUSREQUEST']._serialized_end=4358
- _globals['_GETSTATUSRESPONSE']._serialized_start=4360
- _globals['_GETSTATUSRESPONSE']._serialized_end=4395
- _globals['_LISTALLOPERATORSREQUEST']._serialized_start=4397
- _globals['_LISTALLOPERATORSREQUEST']._serialized_end=4422
- _globals['_LISTALLOPERATORSRESPONSE']._serialized_start=4424
- _globals['_LISTALLOPERATORSRESPONSE']._serialized_end=4465
- _globals['_OPERATORSERVICE']._serialized_start=4468
- _globals['_OPERATORSERVICE']._serialized_end=5541
+ _globals['_OPERATOR']._serialized_start=358
+ _globals['_OPERATOR']._serialized_end=435
+ _globals['_SPECIFICATION']._serialized_start=438
+ _globals['_SPECIFICATION']._serialized_end=1086
+ _globals['_SPECIFICATION_MAPINPUTPINSPECENTRY']._serialized_start=826
+ _globals['_SPECIFICATION_MAPINPUTPINSPECENTRY']._serialized_end=929
+ _globals['_SPECIFICATION_MAPOUTPUTPINSPECENTRY']._serialized_start=931
+ _globals['_SPECIFICATION_MAPOUTPUTPINSPECENTRY']._serialized_end=1035
+ _globals['_SPECIFICATION_PROPERTIESENTRY']._serialized_start=1037
+ _globals['_SPECIFICATION_PROPERTIESENTRY']._serialized_end=1086
+ _globals['_PINSPECIFICATION']._serialized_start=1089
+ _globals['_PINSPECIFICATION']._serialized_end=1223
+ _globals['_OPERATORINPUT']._serialized_start=1225
+ _globals['_OPERATORINPUT']._serialized_end=1314
+ _globals['_UPDATEREQUEST']._serialized_start=1317
+ _globals['_UPDATEREQUEST']._serialized_end=2506
+ _globals['_ARRAYUPDATEREQUEST']._serialized_start=2509
+ _globals['_ARRAYUPDATEREQUEST']._serialized_end=2683
+ _globals['_UPDATECONFIGREQUEST']._serialized_start=2686
+ _globals['_UPDATECONFIGREQUEST']._serialized_end=2826
+ _globals['_OPERATOREVALUATIONREQUEST']._serialized_start=2829
+ _globals['_OPERATOREVALUATIONREQUEST']._serialized_end=3011
+ _globals['_CREATEOPERATORREQUEST']._serialized_start=3013
+ _globals['_CREATEOPERATORREQUEST']._serialized_end=3116
+ _globals['_OPERATORRESPONSE']._serialized_start=3119
+ _globals['_OPERATORRESPONSE']._serialized_end=4063
+ _globals['_ARRAYOPERATORRESPONSE']._serialized_start=4065
+ _globals['_ARRAYOPERATORRESPONSE']._serialized_end=4133
+ _globals['_LISTRESPONSE']._serialized_start=4136
+ _globals['_LISTRESPONSE']._serialized_end=4293
+ _globals['_GETSTATUSREQUEST']._serialized_start=4295
+ _globals['_GETSTATUSREQUEST']._serialized_end=4366
+ _globals['_GETSTATUSRESPONSE']._serialized_start=4368
+ _globals['_GETSTATUSRESPONSE']._serialized_end=4403
+ _globals['_LISTALLOPERATORSREQUEST']._serialized_start=4405
+ _globals['_LISTALLOPERATORSREQUEST']._serialized_end=4430
+ _globals['_LISTALLOPERATORSRESPONSE']._serialized_start=4432
+ _globals['_LISTALLOPERATORSRESPONSE']._serialized_end=4473
+ _globals['_OPERATORSERVICE']._serialized_start=4476
+ _globals['_OPERATORSERVICE']._serialized_end=5549
# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/scoping_pb2.py b/src/ansys/grpc/dpf/scoping_pb2.py
index 6d121623fe..0630e9b4cf 100644
--- a/src/ansys/grpc/dpf/scoping_pb2.py
+++ b/src/ansys/grpc/dpf/scoping_pb2.py
@@ -13,9 +13,10 @@
import ansys.grpc.dpf.base_pb2 as base__pb2
+import ansys.grpc.dpf.collection_message_pb2 as collection__message__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rscoping.proto\x12\x18\x61nsys.api.dpf.scoping.v0\x1a\nbase.proto\">\n\x07Scoping\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"\xc1\x01\n\rUpdateRequest\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12\x33\n\x08location\x18\x03 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.LocationH\x00\x12\x35\n\x08index_id\x18\x04 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.IndexIdH\x00\x42\x10\n\x0eupdate_request\"U\n\x10UpdateIdsRequest\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12\r\n\x05\x61rray\x18\x02 \x01(\x0c\"$\n\x07IndexId\x12\n\n\x02id\x18\x01 \x01(\x05\x12\r\n\x05index\x18\x02 \x01(\x05\"v\n\x0c\x43ountRequest\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12\x32\n\x06\x65ntity\x18\x02 \x01(\x0e\x32\".ansys.api.dpf.base.v0.CountEntity\"C\n\x13GetLocationResponse\x12,\n\x03loc\x18\x01 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.Location\"o\n\nGetRequest\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12\x0c\n\x02id\x18\x02 \x01(\x05H\x00\x12\x0f\n\x05index\x18\x03 \x01(\x05H\x00\x42\x0e\n\x0ctype_request\"<\n\x0bGetResponse\x12\x0c\n\x02id\x18\x01 \x01(\x05H\x00\x12\x0f\n\x05index\x18\x02 \x01(\x05H\x00\x42\x0e\n\x0ctype_request\"\x1d\n\x0cListResponse\x12\r\n\x05\x61rray\x18\x01 
\x01(\x0c\x32\xb1\x05\n\x0eScopingService\x12I\n\x06\x43reate\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a!.ansys.api.dpf.scoping.v0.Scoping\x12O\n\x06Update\x12\'.ansys.api.dpf.scoping.v0.UpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12W\n\tUpdateIds\x12*.ansys.api.dpf.scoping.v0.UpdateIdsRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12S\n\x04List\x12!.ansys.api.dpf.scoping.v0.Scoping\x1a&.ansys.api.dpf.scoping.v0.ListResponse0\x01\x12U\n\x05\x43ount\x12&.ansys.api.dpf.scoping.v0.CountRequest\x1a$.ansys.api.dpf.base.v0.CountResponse\x12_\n\x0bGetLocation\x12!.ansys.api.dpf.scoping.v0.Scoping\x1a-.ansys.api.dpf.scoping.v0.GetLocationResponse\x12R\n\x03Get\x12$.ansys.api.dpf.scoping.v0.GetRequest\x1a%.ansys.api.dpf.scoping.v0.GetResponse\x12I\n\x06\x44\x65lete\x12!.ansys.api.dpf.scoping.v0.Scoping\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x1b\xaa\x02\x18\x41nsys.Api.Dpf.Scoping.V0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rscoping.proto\x12\x18\x61nsys.api.dpf.scoping.v0\x1a\nbase.proto\x1a\x18\x63ollection_message.proto\">\n\x07Scoping\x12\x33\n\x02id\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.base.v0.EntityIdentifier\"\xc1\x01\n\rUpdateRequest\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12\x33\n\x08location\x18\x03 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.LocationH\x00\x12\x35\n\x08index_id\x18\x04 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.IndexIdH\x00\x42\x10\n\x0eupdate_request\"U\n\x10UpdateIdsRequest\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12\r\n\x05\x61rray\x18\x02 \x01(\x0c\"$\n\x07IndexId\x12\n\n\x02id\x18\x01 \x01(\x05\x12\r\n\x05index\x18\x02 \x01(\x05\"v\n\x0c\x43ountRequest\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12\x32\n\x06\x65ntity\x18\x02 \x01(\x0e\x32\".ansys.api.dpf.base.v0.CountEntity\"C\n\x13GetLocationResponse\x12,\n\x03loc\x18\x01 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.Location\"o\n\nGetRequest\x12\x32\n\x07scoping\x18\x01 \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping\x12\x0c\n\x02id\x18\x02 \x01(\x05H\x00\x12\x0f\n\x05index\x18\x03 \x01(\x05H\x00\x42\x0e\n\x0ctype_request\"<\n\x0bGetResponse\x12\x0c\n\x02id\x18\x01 \x01(\x05H\x00\x12\x0f\n\x05index\x18\x02 \x01(\x05H\x00\x42\x0e\n\x0ctype_request\"\x1d\n\x0cListResponse\x12\r\n\x05\x61rray\x18\x01 \x01(\x0c\"\x7f\n\x14SingleScopingAllData\x12\x34\n\x03ids\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.Collection\x12\x31\n\x08location\x18\x02 \x01(\x0b\x32\x1f.ansys.api.dpf.base.v0.Location\"K\n\x07\x41llData\x12@\n\x08scopings\x18\x01 \x03(\x0b\x32..ansys.api.dpf.scoping.v0.SingleScopingAllData\"M\n\x16\x43reateWithDataResponse\x12\x33\n\x08scopings\x18\x01 
\x03(\x0b\x32!.ansys.api.dpf.scoping.v0.Scoping2\x9c\x06\n\x0eScopingService\x12I\n\x06\x43reate\x12\x1c.ansys.api.dpf.base.v0.Empty\x1a!.ansys.api.dpf.scoping.v0.Scoping\x12i\n\x0e\x43reateWithData\x12!.ansys.api.dpf.scoping.v0.AllData\x1a\x30.ansys.api.dpf.scoping.v0.CreateWithDataResponse(\x01\x30\x01\x12O\n\x06Update\x12\'.ansys.api.dpf.scoping.v0.UpdateRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12W\n\tUpdateIds\x12*.ansys.api.dpf.scoping.v0.UpdateIdsRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(\x01\x12S\n\x04List\x12!.ansys.api.dpf.scoping.v0.Scoping\x1a&.ansys.api.dpf.scoping.v0.ListResponse0\x01\x12U\n\x05\x43ount\x12&.ansys.api.dpf.scoping.v0.CountRequest\x1a$.ansys.api.dpf.base.v0.CountResponse\x12_\n\x0bGetLocation\x12!.ansys.api.dpf.scoping.v0.Scoping\x1a-.ansys.api.dpf.scoping.v0.GetLocationResponse\x12R\n\x03Get\x12$.ansys.api.dpf.scoping.v0.GetRequest\x1a%.ansys.api.dpf.scoping.v0.GetResponse\x12I\n\x06\x44\x65lete\x12!.ansys.api.dpf.scoping.v0.Scoping\x1a\x1c.ansys.api.dpf.base.v0.EmptyB\x1b\xaa\x02\x18\x41nsys.Api.Dpf.Scoping.V0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -23,24 +24,30 @@
if _descriptor._USE_C_DESCRIPTORS == False:
_globals['DESCRIPTOR']._options = None
_globals['DESCRIPTOR']._serialized_options = b'\252\002\030Ansys.Api.Dpf.Scoping.V0'
- _globals['_SCOPING']._serialized_start=55
- _globals['_SCOPING']._serialized_end=117
- _globals['_UPDATEREQUEST']._serialized_start=120
- _globals['_UPDATEREQUEST']._serialized_end=313
- _globals['_UPDATEIDSREQUEST']._serialized_start=315
- _globals['_UPDATEIDSREQUEST']._serialized_end=400
- _globals['_INDEXID']._serialized_start=402
- _globals['_INDEXID']._serialized_end=438
- _globals['_COUNTREQUEST']._serialized_start=440
- _globals['_COUNTREQUEST']._serialized_end=558
- _globals['_GETLOCATIONRESPONSE']._serialized_start=560
- _globals['_GETLOCATIONRESPONSE']._serialized_end=627
- _globals['_GETREQUEST']._serialized_start=629
- _globals['_GETREQUEST']._serialized_end=740
- _globals['_GETRESPONSE']._serialized_start=742
- _globals['_GETRESPONSE']._serialized_end=802
- _globals['_LISTRESPONSE']._serialized_start=804
- _globals['_LISTRESPONSE']._serialized_end=833
- _globals['_SCOPINGSERVICE']._serialized_start=836
- _globals['_SCOPINGSERVICE']._serialized_end=1525
+ _globals['_SCOPING']._serialized_start=81
+ _globals['_SCOPING']._serialized_end=143
+ _globals['_UPDATEREQUEST']._serialized_start=146
+ _globals['_UPDATEREQUEST']._serialized_end=339
+ _globals['_UPDATEIDSREQUEST']._serialized_start=341
+ _globals['_UPDATEIDSREQUEST']._serialized_end=426
+ _globals['_INDEXID']._serialized_start=428
+ _globals['_INDEXID']._serialized_end=464
+ _globals['_COUNTREQUEST']._serialized_start=466
+ _globals['_COUNTREQUEST']._serialized_end=584
+ _globals['_GETLOCATIONRESPONSE']._serialized_start=586
+ _globals['_GETLOCATIONRESPONSE']._serialized_end=653
+ _globals['_GETREQUEST']._serialized_start=655
+ _globals['_GETREQUEST']._serialized_end=766
+ _globals['_GETRESPONSE']._serialized_start=768
+ _globals['_GETRESPONSE']._serialized_end=828
+ _globals['_LISTRESPONSE']._serialized_start=830
+ _globals['_LISTRESPONSE']._serialized_end=859
+ _globals['_SINGLESCOPINGALLDATA']._serialized_start=861
+ _globals['_SINGLESCOPINGALLDATA']._serialized_end=988
+ _globals['_ALLDATA']._serialized_start=990
+ _globals['_ALLDATA']._serialized_end=1065
+ _globals['_CREATEWITHDATARESPONSE']._serialized_start=1067
+ _globals['_CREATEWITHDATARESPONSE']._serialized_end=1144
+ _globals['_SCOPINGSERVICE']._serialized_start=1147
+ _globals['_SCOPINGSERVICE']._serialized_end=1943
# @@protoc_insertion_point(module_scope)
diff --git a/src/ansys/grpc/dpf/scoping_pb2_grpc.py b/src/ansys/grpc/dpf/scoping_pb2_grpc.py
index 99c80746e2..4abb93f046 100644
--- a/src/ansys/grpc/dpf/scoping_pb2_grpc.py
+++ b/src/ansys/grpc/dpf/scoping_pb2_grpc.py
@@ -20,6 +20,11 @@ def __init__(self, channel):
request_serializer=base__pb2.Empty.SerializeToString,
response_deserializer=scoping__pb2.Scoping.FromString,
)
+ self.CreateWithData = channel.stream_stream(
+ '/ansys.api.dpf.scoping.v0.ScopingService/CreateWithData',
+ request_serializer=scoping__pb2.AllData.SerializeToString,
+ response_deserializer=scoping__pb2.CreateWithDataResponse.FromString,
+ )
self.Update = channel.unary_unary(
'/ansys.api.dpf.scoping.v0.ScopingService/Update',
request_serializer=scoping__pb2.UpdateRequest.SerializeToString,
@@ -66,6 +71,12 @@ def Create(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
+ def CreateWithData(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
def Update(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -120,6 +131,11 @@ def add_ScopingServiceServicer_to_server(servicer, server):
request_deserializer=base__pb2.Empty.FromString,
response_serializer=scoping__pb2.Scoping.SerializeToString,
),
+ 'CreateWithData': grpc.stream_stream_rpc_method_handler(
+ servicer.CreateWithData,
+ request_deserializer=scoping__pb2.AllData.FromString,
+ response_serializer=scoping__pb2.CreateWithDataResponse.SerializeToString,
+ ),
'Update': grpc.unary_unary_rpc_method_handler(
servicer.Update,
request_deserializer=scoping__pb2.UpdateRequest.FromString,
@@ -182,6 +198,23 @@ def Create(request,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+ @staticmethod
+ def CreateWithData(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/ansys.api.dpf.scoping.v0.ScopingService/CreateWithData',
+ scoping__pb2.AllData.SerializeToString,
+ scoping__pb2.CreateWithDataResponse.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
@staticmethod
def Update(request,
target,
diff --git a/src/ansys/grpc/dpf/workflow_pb2.py b/src/ansys/grpc/dpf/workflow_pb2.py
index 1046423220..a11d7bb9cc 100644
--- a/src/ansys/grpc/dpf/workflow_pb2.py
+++ b/src/ansys/grpc/dpf/workflow_pb2.py
@@ -12,7 +12,7 @@
_sym_db = _symbol_database.Default()
-import ansys.grpc.dpf.collection_pb2 as collection__pb2
+import ansys.grpc.dpf.collection_message_pb2 as collection__message__pb2
import ansys.grpc.dpf.field_pb2 as field__pb2
import ansys.grpc.dpf.scoping_pb2 as scoping__pb2
import ansys.grpc.dpf.base_pb2 as base__pb2
@@ -29,7 +29,7 @@
import ansys.grpc.dpf.label_space_pb2 as label__space__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0eworkflow.proto\x12\x19\x61nsys.api.dpf.workflow.v0\x1a\x10\x63ollection.proto\x1a\x0b\x66ield.proto\x1a\rscoping.proto\x1a\nbase.proto\x1a\x12\x64\x61ta_sources.proto\x1a\x13meshed_region.proto\x1a\x17time_freq_support.proto\x1a\x11result_info.proto\x1a\x0eoperator.proto\x1a\x14\x63yclic_support.proto\x1a\x16workflow_message.proto\x1a\x15\x64pf_any_message.proto\x1a\x0f\x64\x61ta_tree.proto\x1a\x1cgeneric_data_container.proto\x1a\x11label_space.proto\":\n#WorkflowFromInternalRegistryRequest\x12\x13\n\x0bregistry_id\x18\x01 \x01(\x05\"\xf2\x08\n\x17UpdateConnectionRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x10\n\x08pin_name\x18\x02 \x01(\t\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\r\n\x03int\x18\x04 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x05 \x01(\x01H\x00\x12\x0e\n\x04\x62ool\x18\x06 \x01(\x08H\x00\x12.\n\x05\x66ield\x18\x07 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x08 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\t \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\n \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12<\n\x04mesh\x18\x0b \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12\x30\n\x04vint\x18\x0c \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVectorH\x00\x12\x36\n\x07vdouble\x18\r \x01(\x0b\x32#.ansys.api.dpf.base.v0.DoubleVectorH\x00\x12\x45\n\x0b\x63yc_support\x18\x0e \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12P\n\x11time_freq_support\x18\x0f \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12?\n\x08workflow\x18\x10 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12\x39\n\tdata_tree\x18\x12 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12?\n\x0blabel_space\x18\x14 
\x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceH\x00\x12_\n\x16generic_data_container\x18\x15 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x12:\n\x06\x61s_any\x18\x13 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12?\n\x07inputop\x18\x11 \x01(\x0b\x32,.ansys.api.dpf.dpf_operator.v0.OperatorInputH\x00\x42\x07\n\x05input\"\xc1\x01\n\x1c\x41rrayUpdateConnectionRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x10\n\x08pin_name\x18\x02 \x01(\t\x12+\n\x05\x61rray\x18\x03 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\x12)\n\x04type\x18\x04 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"\x8a\x01\n\x0eOperatorNaming\x12\x0b\n\x03pin\x18\x02 \x01(\x05\x12\x0c\n\x04name\x18\x03 \x01(\t\x12;\n\x08operator\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12\x12\n\x08old_name\x18\x04 \x01(\tH\x00\x42\x0c\n\nidentifier\"\x88\x02\n\x15UpdatePinNamesRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12@\n\rinputs_naming\x18\x02 \x03(\x0b\x32).ansys.api.dpf.workflow.v0.OperatorNaming\x12\x41\n\x0eoutputs_naming\x18\x03 \x03(\x0b\x32).ansys.api.dpf.workflow.v0.OperatorNaming\x12\x17\n\x0finputs_to_erase\x18\x04 \x03(\t\x12\x18\n\x10outputs_to_erase\x18\x05 \x03(\t\"\xbf\x01\n\x19WorkflowEvaluationRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x10\n\x08pin_name\x18\x02 \x01(\t\x12)\n\x04type\x18\x03 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12,\n\x07subtype\x18\x04 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"\xb0\x07\n\x10WorkflowResponse\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\r\n\x03int\x18\x04 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x05 \x01(\x01H\x00\x12\x0e\n\x04\x62ool\x18\x06 \x01(\x08H\x00\x12.\n\x05\x66ield\x18\x07 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x08 
\x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\t \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12<\n\x04mesh\x18\n \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12?\n\x0bresult_info\x18\x0b \x01(\x0b\x32(.ansys.api.dpf.result_info.v0.ResultInfoH\x00\x12P\n\x11time_freq_support\x18\x0c \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\r \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12\x45\n\x0b\x63yc_support\x18\x0e \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12?\n\x08workflow\x18\x0f \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12\x37\n\x03\x61ny\x18\x10 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12;\n\x08operator\x18\x11 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12\x39\n\tdata_tree\x18\x12 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12_\n\x16generic_data_container\x18\x13 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x42\x08\n\x06output\"D\n\x15\x41rrayWorkflowResponse\x12+\n\x05\x61rray\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"\x8a\x01\n\x13\x41\x64\x64OperatorsRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12:\n\toperators\x18\x02 \x03(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\"\x89\x01\n\x1fRecordInInternalRegistryRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x12\n\nidentifier\x18\x02 \x01(\t\x12\x19\n\x11transferOwnership\x18\x03 \x01(\x08\".\n RecordInInternalRegistryResponse\x12\n\n\x02id\x18\x01 \x01(\x05\"$\n\x0f\x45xposedPinNames\x12\x11\n\tpin_names\x18\x01 \x03(\t\"\xb1\x01\n\x0cListResponse\x12\x16\n\x0eoperator_names\x18\x01 \x03(\t\x12\x43\n\x0finput_pin_names\x18\x02 \x01(\x0b\x32*.ansys.api.dpf.workflow.v0.ExposedPinNames\x12\x44\n\x10output_pin_names\x18\x03 
\x01(\x0b\x32*.ansys.api.dpf.workflow.v0.ExposedPinNames\"\xa0\x01\n\x12GetOperatorRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x16\n\x0coperator_num\x18\x02 \x01(\x05H\x00\x12\x14\n\ninput_name\x18\x03 \x01(\tH\x00\x12\x15\n\x0boutput_name\x18\x04 \x01(\tH\x00\x42\x0c\n\nop_request\"^\n\x13GetOperatorResponse\x12\x34\n\x03ops\x18\x01 \x03(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x11\n\tpin_index\x18\x02 \x03(\x05\"D\n\x19InputToOutputChainRequest\x12\x13\n\x0boutput_name\x18\x01 \x01(\t\x12\x12\n\ninput_name\x18\x02 \x01(\t\"\xdc\x01\n\x0e\x43onnectRequest\x12=\n\x08right_wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12<\n\x07left_wf\x18\x02 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12M\n\x0finput_to_output\x18\x03 \x03(\x0b\x32\x34.ansys.api.dpf.workflow.v0.InputToOutputChainRequest\"\x1c\n\nTextStream\x12\x0e\n\x06stream\x18\x01 \x01(\t\"\x8b\x01\n\rCreateRequest\x12-\n\x05\x65mpty\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.EmptyH\x00\x12\x43\n\x0bremote_copy\x18\x02 \x01(\x0b\x32,.ansys.api.dpf.workflow.v0.RemoteCopyRequestH\x00\x42\x06\n\x04type\"]\n\x11RemoteCopyRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x0f\n\x07\x61\x64\x64ress\x18\x02 
\x01(\t2\x91\x0e\n\x0fWorkflowService\x12_\n\x06\x43reate\x12(.ansys.api.dpf.workflow.v0.CreateRequest\x1a+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x64\n\x0eLoadFromStream\x12%.ansys.api.dpf.workflow.v0.TextStream\x1a+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x86\x01\n\x17GetFromInternalRegistry\x12>.ansys.api.dpf.workflow.v0.WorkflowFromInternalRegistryRequest\x1a+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x64\n\x10UpdateConnection\x12\x32.ansys.api.dpf.workflow.v0.UpdateConnectionRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12`\n\x0eUpdatePinNames\x12\x30.ansys.api.dpf.workflow.v0.UpdatePinNamesRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12\\\n\x0c\x41\x64\x64Operators\x12..ansys.api.dpf.workflow.v0.AddOperatorsRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12h\n\x03Get\x12\x34.ansys.api.dpf.workflow.v0.WorkflowEvaluationRequest\x1a+.ansys.api.dpf.workflow.v0.WorkflowResponse\x12\x93\x01\n\x18RecordInInternalRegistry\x12:.ansys.api.dpf.workflow.v0.RecordInInternalRegistryRequest\x1a;.ansys.api.dpf.workflow.v0.RecordInInternalRegistryResponse\x12\\\n\x04List\x12+.ansys.api.dpf.workflow_message.v0.Workflow\x1a\'.ansys.api.dpf.workflow.v0.ListResponse\x12R\n\x07\x43onnect\x12).ansys.api.dpf.workflow.v0.ConnectRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12^\n\x11\x44iscoverOperators\x12+.ansys.api.dpf.workflow_message.v0.Workflow\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12\x63\n\rWriteToStream\x12+.ansys.api.dpf.workflow_message.v0.Workflow\x1a%.ansys.api.dpf.workflow.v0.TextStream\x12l\n\x0bGetOperator\x12-.ansys.api.dpf.workflow.v0.GetOperatorRequest\x1a..ansys.api.dpf.workflow.v0.GetOperatorResponse\x12S\n\x06\x44\x65lete\x12+.ansys.api.dpf.workflow_message.v0.Workflow\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12`\n\x11LoadFromBigStream\x12\x1c.ansys.api.dpf.base.v0.Array\x1a+.ansys.api.dpf.workflow_message.v0.Workflow(\x01\x12s\n\x18UpdateConnectionStreamed\x12\x37.ansys.api.dpf.workflow.v0.ArrayUpdateConnectionRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(
\x01\x12w\n\x0bGetStreamed\x12\x34.ansys.api.dpf.workflow.v0.WorkflowEvaluationRequest\x1a\x30.ansys.api.dpf.workflow.v0.ArrayWorkflowResponse0\x01\x42\x1c\xaa\x02\x19\x41nsys.Api.Dpf.Workflow.V0b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0eworkflow.proto\x12\x19\x61nsys.api.dpf.workflow.v0\x1a\x18\x63ollection_message.proto\x1a\x0b\x66ield.proto\x1a\rscoping.proto\x1a\nbase.proto\x1a\x12\x64\x61ta_sources.proto\x1a\x13meshed_region.proto\x1a\x17time_freq_support.proto\x1a\x11result_info.proto\x1a\x0eoperator.proto\x1a\x14\x63yclic_support.proto\x1a\x16workflow_message.proto\x1a\x15\x64pf_any_message.proto\x1a\x0f\x64\x61ta_tree.proto\x1a\x1cgeneric_data_container.proto\x1a\x11label_space.proto\":\n#WorkflowFromInternalRegistryRequest\x12\x13\n\x0bregistry_id\x18\x01 \x01(\x05\"\xf2\x08\n\x17UpdateConnectionRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x10\n\x08pin_name\x18\x02 \x01(\t\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\r\n\x03int\x18\x04 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x05 \x01(\x01H\x00\x12\x0e\n\x04\x62ool\x18\x06 \x01(\x08H\x00\x12.\n\x05\x66ield\x18\x07 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x08 \x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\t \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\n \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12<\n\x04mesh\x18\x0b \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12\x30\n\x04vint\x18\x0c \x01(\x0b\x32 .ansys.api.dpf.base.v0.IntVectorH\x00\x12\x36\n\x07vdouble\x18\r \x01(\x0b\x32#.ansys.api.dpf.base.v0.DoubleVectorH\x00\x12\x45\n\x0b\x63yc_support\x18\x0e \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12P\n\x11time_freq_support\x18\x0f \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12?\n\x08workflow\x18\x10 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12\x39\n\tdata_tree\x18\x12 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12?\n\x0blabel_space\x18\x14 
\x01(\x0b\x32(.ansys.api.dpf.label_space.v0.LabelSpaceH\x00\x12_\n\x16generic_data_container\x18\x15 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x12:\n\x06\x61s_any\x18\x13 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12?\n\x07inputop\x18\x11 \x01(\x0b\x32,.ansys.api.dpf.dpf_operator.v0.OperatorInputH\x00\x42\x07\n\x05input\"\xc1\x01\n\x1c\x41rrayUpdateConnectionRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x10\n\x08pin_name\x18\x02 \x01(\t\x12+\n\x05\x61rray\x18\x03 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\x12)\n\x04type\x18\x04 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"\x8a\x01\n\x0eOperatorNaming\x12\x0b\n\x03pin\x18\x02 \x01(\x05\x12\x0c\n\x04name\x18\x03 \x01(\t\x12;\n\x08operator\x18\x01 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12\x12\n\x08old_name\x18\x04 \x01(\tH\x00\x42\x0c\n\nidentifier\"\x88\x02\n\x15UpdatePinNamesRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12@\n\rinputs_naming\x18\x02 \x03(\x0b\x32).ansys.api.dpf.workflow.v0.OperatorNaming\x12\x41\n\x0eoutputs_naming\x18\x03 \x03(\x0b\x32).ansys.api.dpf.workflow.v0.OperatorNaming\x12\x17\n\x0finputs_to_erase\x18\x04 \x03(\t\x12\x18\n\x10outputs_to_erase\x18\x05 \x03(\t\"\xbf\x01\n\x19WorkflowEvaluationRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x10\n\x08pin_name\x18\x02 \x01(\t\x12)\n\x04type\x18\x03 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\x12,\n\x07subtype\x18\x04 \x01(\x0e\x32\x1b.ansys.api.dpf.base.v0.Type\"\xb0\x07\n\x10WorkflowResponse\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\r\n\x03int\x18\x04 \x01(\x05H\x00\x12\x10\n\x06\x64ouble\x18\x05 \x01(\x01H\x00\x12\x0e\n\x04\x62ool\x18\x06 \x01(\x08H\x00\x12.\n\x05\x66ield\x18\x07 \x01(\x0b\x32\x1d.ansys.api.dpf.field.v0.FieldH\x00\x12=\n\ncollection\x18\x08 
\x01(\x0b\x32\'.ansys.api.dpf.collection.v0.CollectionH\x00\x12\x34\n\x07scoping\x18\t \x01(\x0b\x32!.ansys.api.dpf.scoping.v0.ScopingH\x00\x12<\n\x04mesh\x18\n \x01(\x0b\x32,.ansys.api.dpf.meshed_region.v0.MeshedRegionH\x00\x12?\n\x0bresult_info\x18\x0b \x01(\x0b\x32(.ansys.api.dpf.result_info.v0.ResultInfoH\x00\x12P\n\x11time_freq_support\x18\x0c \x01(\x0b\x32\x33.ansys.api.dpf.time_freq_support.v0.TimeFreqSupportH\x00\x12\x42\n\x0c\x64\x61ta_sources\x18\r \x01(\x0b\x32*.ansys.api.dpf.data_sources.v0.DataSourcesH\x00\x12\x45\n\x0b\x63yc_support\x18\x0e \x01(\x0b\x32..ansys.api.dpf.cyclic_support.v0.CyclicSupportH\x00\x12?\n\x08workflow\x18\x0f \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.WorkflowH\x00\x12\x37\n\x03\x61ny\x18\x10 \x01(\x0b\x32(.ansys.api.dpf.dpf_any_message.v0.DpfAnyH\x00\x12;\n\x08operator\x18\x11 \x01(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.OperatorH\x00\x12\x39\n\tdata_tree\x18\x12 \x01(\x0b\x32$.ansys.api.dpf.data_tree.v0.DataTreeH\x00\x12_\n\x16generic_data_container\x18\x13 \x01(\x0b\x32=.ansys.api.dpf.generic_data_container.v0.GenericDataContainerH\x00\x42\x08\n\x06output\"D\n\x15\x41rrayWorkflowResponse\x12+\n\x05\x61rray\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.Array\"\x8a\x01\n\x13\x41\x64\x64OperatorsRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12:\n\toperators\x18\x02 \x03(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\"\x89\x01\n\x1fRecordInInternalRegistryRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x12\n\nidentifier\x18\x02 \x01(\t\x12\x19\n\x11transferOwnership\x18\x03 \x01(\x08\".\n RecordInInternalRegistryResponse\x12\n\n\x02id\x18\x01 \x01(\x05\"$\n\x0f\x45xposedPinNames\x12\x11\n\tpin_names\x18\x01 \x03(\t\"\xb1\x01\n\x0cListResponse\x12\x16\n\x0eoperator_names\x18\x01 \x03(\t\x12\x43\n\x0finput_pin_names\x18\x02 \x01(\x0b\x32*.ansys.api.dpf.workflow.v0.ExposedPinNames\x12\x44\n\x10output_pin_names\x18\x03 
\x01(\x0b\x32*.ansys.api.dpf.workflow.v0.ExposedPinNames\"\xa0\x01\n\x12GetOperatorRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x16\n\x0coperator_num\x18\x02 \x01(\x05H\x00\x12\x14\n\ninput_name\x18\x03 \x01(\tH\x00\x12\x15\n\x0boutput_name\x18\x04 \x01(\tH\x00\x42\x0c\n\nop_request\"^\n\x13GetOperatorResponse\x12\x34\n\x03ops\x18\x01 \x03(\x0b\x32\'.ansys.api.dpf.dpf_operator.v0.Operator\x12\x11\n\tpin_index\x18\x02 \x03(\x05\"D\n\x19InputToOutputChainRequest\x12\x13\n\x0boutput_name\x18\x01 \x01(\t\x12\x12\n\ninput_name\x18\x02 \x01(\t\"\xdc\x01\n\x0e\x43onnectRequest\x12=\n\x08right_wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12<\n\x07left_wf\x18\x02 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12M\n\x0finput_to_output\x18\x03 \x03(\x0b\x32\x34.ansys.api.dpf.workflow.v0.InputToOutputChainRequest\"\x1c\n\nTextStream\x12\x0e\n\x06stream\x18\x01 \x01(\t\"\x8b\x01\n\rCreateRequest\x12-\n\x05\x65mpty\x18\x01 \x01(\x0b\x32\x1c.ansys.api.dpf.base.v0.EmptyH\x00\x12\x43\n\x0bremote_copy\x18\x02 \x01(\x0b\x32,.ansys.api.dpf.workflow.v0.RemoteCopyRequestH\x00\x42\x06\n\x04type\"]\n\x11RemoteCopyRequest\x12\x37\n\x02wf\x18\x01 \x01(\x0b\x32+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x0f\n\x07\x61\x64\x64ress\x18\x02 
\x01(\t2\x91\x0e\n\x0fWorkflowService\x12_\n\x06\x43reate\x12(.ansys.api.dpf.workflow.v0.CreateRequest\x1a+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x64\n\x0eLoadFromStream\x12%.ansys.api.dpf.workflow.v0.TextStream\x1a+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x86\x01\n\x17GetFromInternalRegistry\x12>.ansys.api.dpf.workflow.v0.WorkflowFromInternalRegistryRequest\x1a+.ansys.api.dpf.workflow_message.v0.Workflow\x12\x64\n\x10UpdateConnection\x12\x32.ansys.api.dpf.workflow.v0.UpdateConnectionRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12`\n\x0eUpdatePinNames\x12\x30.ansys.api.dpf.workflow.v0.UpdatePinNamesRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12\\\n\x0c\x41\x64\x64Operators\x12..ansys.api.dpf.workflow.v0.AddOperatorsRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12h\n\x03Get\x12\x34.ansys.api.dpf.workflow.v0.WorkflowEvaluationRequest\x1a+.ansys.api.dpf.workflow.v0.WorkflowResponse\x12\x93\x01\n\x18RecordInInternalRegistry\x12:.ansys.api.dpf.workflow.v0.RecordInInternalRegistryRequest\x1a;.ansys.api.dpf.workflow.v0.RecordInInternalRegistryResponse\x12\\\n\x04List\x12+.ansys.api.dpf.workflow_message.v0.Workflow\x1a\'.ansys.api.dpf.workflow.v0.ListResponse\x12R\n\x07\x43onnect\x12).ansys.api.dpf.workflow.v0.ConnectRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12^\n\x11\x44iscoverOperators\x12+.ansys.api.dpf.workflow_message.v0.Workflow\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12\x63\n\rWriteToStream\x12+.ansys.api.dpf.workflow_message.v0.Workflow\x1a%.ansys.api.dpf.workflow.v0.TextStream\x12l\n\x0bGetOperator\x12-.ansys.api.dpf.workflow.v0.GetOperatorRequest\x1a..ansys.api.dpf.workflow.v0.GetOperatorResponse\x12S\n\x06\x44\x65lete\x12+.ansys.api.dpf.workflow_message.v0.Workflow\x1a\x1c.ansys.api.dpf.base.v0.Empty\x12`\n\x11LoadFromBigStream\x12\x1c.ansys.api.dpf.base.v0.Array\x1a+.ansys.api.dpf.workflow_message.v0.Workflow(\x01\x12s\n\x18UpdateConnectionStreamed\x12\x37.ansys.api.dpf.workflow.v0.ArrayUpdateConnectionRequest\x1a\x1c.ansys.api.dpf.base.v0.Empty(
\x01\x12w\n\x0bGetStreamed\x12\x34.ansys.api.dpf.workflow.v0.WorkflowEvaluationRequest\x1a\x30.ansys.api.dpf.workflow.v0.ArrayWorkflowResponse0\x01\x42\x1c\xaa\x02\x19\x41nsys.Api.Dpf.Workflow.V0b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -37,46 +37,46 @@
if _descriptor._USE_C_DESCRIPTORS == False:
_globals['DESCRIPTOR']._options = None
_globals['DESCRIPTOR']._serialized_options = b'\252\002\031Ansys.Api.Dpf.Workflow.V0'
- _globals['_WORKFLOWFROMINTERNALREGISTRYREQUEST']._serialized_start=339
- _globals['_WORKFLOWFROMINTERNALREGISTRYREQUEST']._serialized_end=397
- _globals['_UPDATECONNECTIONREQUEST']._serialized_start=400
- _globals['_UPDATECONNECTIONREQUEST']._serialized_end=1538
- _globals['_ARRAYUPDATECONNECTIONREQUEST']._serialized_start=1541
- _globals['_ARRAYUPDATECONNECTIONREQUEST']._serialized_end=1734
- _globals['_OPERATORNAMING']._serialized_start=1737
- _globals['_OPERATORNAMING']._serialized_end=1875
- _globals['_UPDATEPINNAMESREQUEST']._serialized_start=1878
- _globals['_UPDATEPINNAMESREQUEST']._serialized_end=2142
- _globals['_WORKFLOWEVALUATIONREQUEST']._serialized_start=2145
- _globals['_WORKFLOWEVALUATIONREQUEST']._serialized_end=2336
- _globals['_WORKFLOWRESPONSE']._serialized_start=2339
- _globals['_WORKFLOWRESPONSE']._serialized_end=3283
- _globals['_ARRAYWORKFLOWRESPONSE']._serialized_start=3285
- _globals['_ARRAYWORKFLOWRESPONSE']._serialized_end=3353
- _globals['_ADDOPERATORSREQUEST']._serialized_start=3356
- _globals['_ADDOPERATORSREQUEST']._serialized_end=3494
- _globals['_RECORDININTERNALREGISTRYREQUEST']._serialized_start=3497
- _globals['_RECORDININTERNALREGISTRYREQUEST']._serialized_end=3634
- _globals['_RECORDININTERNALREGISTRYRESPONSE']._serialized_start=3636
- _globals['_RECORDININTERNALREGISTRYRESPONSE']._serialized_end=3682
- _globals['_EXPOSEDPINNAMES']._serialized_start=3684
- _globals['_EXPOSEDPINNAMES']._serialized_end=3720
- _globals['_LISTRESPONSE']._serialized_start=3723
- _globals['_LISTRESPONSE']._serialized_end=3900
- _globals['_GETOPERATORREQUEST']._serialized_start=3903
- _globals['_GETOPERATORREQUEST']._serialized_end=4063
- _globals['_GETOPERATORRESPONSE']._serialized_start=4065
- _globals['_GETOPERATORRESPONSE']._serialized_end=4159
- _globals['_INPUTTOOUTPUTCHAINREQUEST']._serialized_start=4161
- _globals['_INPUTTOOUTPUTCHAINREQUEST']._serialized_end=4229
- _globals['_CONNECTREQUEST']._serialized_start=4232
- _globals['_CONNECTREQUEST']._serialized_end=4452
- _globals['_TEXTSTREAM']._serialized_start=4454
- _globals['_TEXTSTREAM']._serialized_end=4482
- _globals['_CREATEREQUEST']._serialized_start=4485
- _globals['_CREATEREQUEST']._serialized_end=4624
- _globals['_REMOTECOPYREQUEST']._serialized_start=4626
- _globals['_REMOTECOPYREQUEST']._serialized_end=4719
- _globals['_WORKFLOWSERVICE']._serialized_start=4722
- _globals['_WORKFLOWSERVICE']._serialized_end=6531
+ _globals['_WORKFLOWFROMINTERNALREGISTRYREQUEST']._serialized_start=347
+ _globals['_WORKFLOWFROMINTERNALREGISTRYREQUEST']._serialized_end=405
+ _globals['_UPDATECONNECTIONREQUEST']._serialized_start=408
+ _globals['_UPDATECONNECTIONREQUEST']._serialized_end=1546
+ _globals['_ARRAYUPDATECONNECTIONREQUEST']._serialized_start=1549
+ _globals['_ARRAYUPDATECONNECTIONREQUEST']._serialized_end=1742
+ _globals['_OPERATORNAMING']._serialized_start=1745
+ _globals['_OPERATORNAMING']._serialized_end=1883
+ _globals['_UPDATEPINNAMESREQUEST']._serialized_start=1886
+ _globals['_UPDATEPINNAMESREQUEST']._serialized_end=2150
+ _globals['_WORKFLOWEVALUATIONREQUEST']._serialized_start=2153
+ _globals['_WORKFLOWEVALUATIONREQUEST']._serialized_end=2344
+ _globals['_WORKFLOWRESPONSE']._serialized_start=2347
+ _globals['_WORKFLOWRESPONSE']._serialized_end=3291
+ _globals['_ARRAYWORKFLOWRESPONSE']._serialized_start=3293
+ _globals['_ARRAYWORKFLOWRESPONSE']._serialized_end=3361
+ _globals['_ADDOPERATORSREQUEST']._serialized_start=3364
+ _globals['_ADDOPERATORSREQUEST']._serialized_end=3502
+ _globals['_RECORDININTERNALREGISTRYREQUEST']._serialized_start=3505
+ _globals['_RECORDININTERNALREGISTRYREQUEST']._serialized_end=3642
+ _globals['_RECORDININTERNALREGISTRYRESPONSE']._serialized_start=3644
+ _globals['_RECORDININTERNALREGISTRYRESPONSE']._serialized_end=3690
+ _globals['_EXPOSEDPINNAMES']._serialized_start=3692
+ _globals['_EXPOSEDPINNAMES']._serialized_end=3728
+ _globals['_LISTRESPONSE']._serialized_start=3731
+ _globals['_LISTRESPONSE']._serialized_end=3908
+ _globals['_GETOPERATORREQUEST']._serialized_start=3911
+ _globals['_GETOPERATORREQUEST']._serialized_end=4071
+ _globals['_GETOPERATORRESPONSE']._serialized_start=4073
+ _globals['_GETOPERATORRESPONSE']._serialized_end=4167
+ _globals['_INPUTTOOUTPUTCHAINREQUEST']._serialized_start=4169
+ _globals['_INPUTTOOUTPUTCHAINREQUEST']._serialized_end=4237
+ _globals['_CONNECTREQUEST']._serialized_start=4240
+ _globals['_CONNECTREQUEST']._serialized_end=4460
+ _globals['_TEXTSTREAM']._serialized_start=4462
+ _globals['_TEXTSTREAM']._serialized_end=4490
+ _globals['_CREATEREQUEST']._serialized_start=4493
+ _globals['_CREATEREQUEST']._serialized_end=4632
+ _globals['_REMOTECOPYREQUEST']._serialized_start=4634
+ _globals['_REMOTECOPYREQUEST']._serialized_end=4727
+ _globals['_WORKFLOWSERVICE']._serialized_start=4730
+ _globals['_WORKFLOWSERVICE']._serialized_end=6539
# @@protoc_insertion_point(module_scope)