Contact
CoCalc Logo Icon
StoreFeaturesDocsShareSupport News Sign UpSign In
| Download
Views: 1021
%typeset_mode True
# Declare the coordinate symbols and the 16 nodal-coordinate symbols.
var('x, y, xi, eta')
nodal_syms = var('x_0, x_1, x_2, x_3, x_4, x_5, x_6, x_7,'
                 'y_0, y_1, y_2, y_3, y_4, y_5, y_6, y_7')
# Every nodal coordinate is a constant with respect to differentiation
# in xi/eta, so mark them all in one pass instead of 16 separate calls.
for sym in nodal_syms:
    assume(sym, 'constant')
$(x,\; y,\; \xi,\; \eta)$
$(x_{0}, x_{1}, x_{2}, x_{3}, x_{4}, x_{5}, x_{6}, x_{7},\; y_{0}, y_{1}, y_{2}, y_{3}, y_{4}, y_{5}, y_{6}, y_{7})$
# Shape functions of the 8-node serendipity quadrilateral on the reference
# square (xi, eta) in [-1, 1]^2.  Each N_k equals 1 at its own node and 0 at
# the other seven (node locations verified below by direct evaluation).
#
# Fix: the original used float coefficients (1.0/4.0, 1.0/2.0), which leaked
# floating-point roundoff into the symbolic algebra — the simplified Jacobian
# picked up spurious O(1e-17) eta^3 and eta terms.  Exact rational
# coefficients (Sage preparses 1/4 and 1/2 as rationals) keep the symbolic
# results exact.
N_0 = (-1 / 4) * (1 - xi) * (1 + eta) * (1 + xi - eta)  # corner  (-1, +1)
N_1 = (1 / 2) * (1 - xi) * (1 - eta**2)                 # midside (-1,  0)
N_2 = (-1 / 4) * (1 - xi) * (1 - eta) * (1 + xi + eta)  # corner  (-1, -1)
N_3 = (1 / 2) * (1 - eta) * (1 - xi**2)                 # midside ( 0, -1)
N_4 = (-1 / 4) * (1 + xi) * (1 - eta) * (1 - xi + eta)  # corner  (+1, -1)
N_5 = (1 / 2) * (1 + xi) * (1 - eta**2)                 # midside (+1,  0)
N_6 = (-1 / 4) * (1 + xi) * (1 + eta) * (1 - xi - eta)  # corner  (+1, +1)
N_7 = (1 / 2) * (1 + eta) * (1 - xi**2)                 # midside ( 0, +1)
# Display each shape function (worksheet echoes bare expressions).
N_0
N_1
N_2
N_3
N_4
N_5
N_6
N_7
$N_0 = -\left(\eta - \xi - 1\right)\left(\eta + 1\right)\left(0.25\,\xi - 0.25\right)$
$N_1 = -\left(\eta^{2} - 1\right)\left(-0.5\,\xi + 0.5\right)$
$N_2 = -\left(\eta + \xi + 1\right)\left(\eta - 1\right)\left(0.25\,\xi - 0.25\right)$
$N_3 = -\left(\xi^{2} - 1\right)\left(-0.5\,\eta + 0.5\right)$
$N_4 = -\left(\eta - \xi + 1\right)\left(\eta - 1\right)\left(-0.25\,\xi - 0.25\right)$
$N_5 = -\left(\eta^{2} - 1\right)\left(0.5\,\xi + 0.5\right)$
$N_6 = -\left(\eta + \xi - 1\right)\left(\eta + 1\right)\left(-0.25\,\xi - 0.25\right)$
$N_7 = -\left(\xi^{2} - 1\right)\left(0.5\,\eta + 0.5\right)$
import numpy as np

# Mesh of a circular ring (annulus): 25 nodes, 4 curved quadrilateral
# elements.  Each element row lists 8 boundary nodes ordered to match the
# serendipity shape functions N_0..N_7, plus a 9th node that the 8-node
# map below does not use.
nodes = [[-0.0, 0.0], [-0.0, 3.0], [-0.0, 1.0], [-1.0, 0.0], [-3.0, 0.0],
         [-0.0, -1.0], [-0.0, -3.0], [1.0, -0.0], [3.0, -0.0],
         [-0.70710678, 0.70710678], [-2.12132034, 2.12132034],
         [-2.12132034, -2.12132034], [-0.70710678, -0.70710678],
         [2.12132034, -2.12132034], [0.70710678, -0.70710678],
         [2.12132034, 2.12132034], [0.70710678, 0.70710678],
         [0.0, 2.0], [-2.0, 0.0], [0.0, -2.0], [2.0, 0.0],
         [-1.41421356, 1.41421356], [-1.41421356, -1.41421356],
         [1.41421356, -1.41421356], [1.41421356, 1.41421356]]
elements = [[1, 10, 4, 18, 3, 9, 2, 17, 21],
            [3, 18, 4, 11, 6, 19, 5, 12, 22],
            [5, 19, 6, 13, 8, 20, 7, 14, 23],
            [7, 16, 2, 17, 1, 15, 8, 20, 24]]

# 16 Legendre-Gauss-Lobatto points on [-1, 1]; the same set is used along
# both reference directions, so eta_LGL is a copy of xi_LGL.
xi_LGL = np.array([-1., -0.96956805, -0.89920053, -0.79200829,
                   -0.6523887, -0.48605942, -0.29983047, -0.10132627,
                   0.10132627, 0.29983047, 0.48605942, 0.6523887,
                   0.79200829, 0.89920053, 0.96956805, 1.])
eta_LGL = xi_LGL.copy()
Xi, Eta = np.meshgrid(xi_LGL, eta_LGL)

# Isoparametric map of element 0: symbolic x(xi, eta), y(xi, eta) built by
# summing shape function * nodal coordinate over the element's 8 boundary
# nodes.  (Replaces two 8-term copy-pasted sums with a loop.)
shape_fns = [N_0, N_1, N_2, N_3, N_4, N_5, N_6, N_7]
x = sum(N_k * nodes[elements[0][k]][0] for k, N_k in enumerate(shape_fns))
y = sum(N_k * nodes[elements[0][k]][1] for k, N_k in enumerate(shape_fns))
# Jacobian determinant of the isoparametric map (xi, eta) -> (x, y):
#   J = (dx/dxi)(dy/deta) - (dy/dxi)(dx/deta)
dx_dxi = diff(x, xi)
dx_deta = diff(x, eta)
dy_dxi = diff(y, xi)
dy_deta = diff(y, eta)
jacobian = dx_dxi * dy_deta - dy_dxi * dx_deta
# Echo the fully simplified symbolic Jacobian.
jacobian.simplify_full()
$0.41421356\,\eta^{2} + \left(-\left(2.77555756156\times 10^{-17}\right)\eta^{3} - 0.20710678\,\eta^{2} + \left(1.11022302463\times 10^{-16}\right)\eta - 0.70710678\right)\xi + 1.41421356$
# Evaluate the symbolic Jacobian at every tensor-product LGL point
# (row = eta, column = xi) and write the grid out as CSV.
jacobian_array = np.zeros(Xi.shape)
for row, eta_val in enumerate(eta_LGL):
    for col, xi_val in enumerate(xi_LGL):
        jacobian_array[row][col] = jacobian(xi=xi_val, eta=eta_val)
np.savetxt('jacobian_data.csv', jacobian_array, delimiter=',')
for eta_points in eta_LGL: for xi_points in xi_LGL: print round(xi_points, 3), '\t', print print np.round(Xi, 3) print '\n****************************************\n' for eta_points in eta_LGL: for xi_points in xi_LGL: print round(eta_points, 3), '\t', print print np.round(Eta, 3)
-1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 -1.0 -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1.0 [[-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. 
-0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. ] [-1. -0.97 -0.899 -0.792 -0.652 -0.486 -0.3 -0.101 0.101 0.3 0.486 0.652 0.792 0.899 0.97 1. 
]] **************************************** -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1. -1. -1. -1. -1. -1. -1. 
] [-0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 -0.97 ] [-0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899 -0.899] [-0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792 -0.792] [-0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652 -0.652] [-0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486 -0.486] [-0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 -0.3 ] [-0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101 -0.101] [ 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101 0.101] [ 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 ] [ 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486 0.486] [ 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652 0.652] [ 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792 0.792] [ 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899 0.899] [ 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 0.97 ] [ 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. ]]