diff --git a/reverse_engineering/helpers/clusterHelper.js b/reverse_engineering/helpers/clusterHelper.js
index 3e8537b..3e45592 100644
--- a/reverse_engineering/helpers/clusterHelper.js
+++ b/reverse_engineering/helpers/clusterHelper.js
@@ -401,21 +401,21 @@ const getDbCollectionData = async ({
 	});
 	const documentKind = data.documentKinds?.[bucketName]?.documentKindName || '';
 	const options = { limit, pagination: data.pagination, bucketName, scopeName, collectionName };
+	const fieldInference = data.fieldInference;
 
 	let documents = [];
+	let standardDocument = null;
 	let query = queryHelper.getSelectCollectionDocumentsQuery({ bucketName, scopeName, collectionName });
 
 	try {
 		documents = await getPaginatedQuery({ cluster, options, query, logger });
-
-		return schemaHelper.getDbCollectionData({
+		standardDocument = await getCollectionDocumentByDocumentId({
+			cluster,
 			bucketName,
 			scopeName,
 			collectionName,
-			documentKind,
-			documents,
-			collectionIndexes,
-			includeEmptyCollection,
+			documentId: documents[0]?.docid,
+			logger,
 		});
 	} catch (error) {
 		try {
@@ -429,6 +429,14 @@ const getDbCollectionData = async ({
 						collectionName,
 					});
 					documents = await getPaginatedQuery({ cluster, options, query, logger });
+					standardDocument = await getCollectionDocumentByDocumentId({
+						cluster,
+						bucketName,
+						scopeName,
+						collectionName: DEFAULT_NAME,
+						documentId: documents[0]?.docid,
+						logger,
+					});
 					break;
 				case COUCHBASE_ERROR_CODE.primaryIndexDoesNotExist:
 					documents = await getCollectionDocumentsByInfer({
@@ -463,17 +471,19 @@ const getDbCollectionData = async ({
 		}
 
 		logger.error(error);
-
-		return schemaHelper.getDbCollectionData({
-			bucketName,
-			scopeName,
-			collectionName,
-			documentKind,
-			documents,
-			collectionIndexes,
-			includeEmptyCollection,
-		});
 	}
+
+	return schemaHelper.getDbCollectionData({
+		bucketName,
+		scopeName,
+		collectionName,
+		documentKind,
+		documents,
+		collectionIndexes,
+		includeEmptyCollection,
+		standardDocument,
+		fieldInference,
+	});
 };
 
 /**
@@ -522,6 +532,46 @@ const getSelectedCollections = async ({ cluster, data, logger, app }) => {
 	}, {});
 };
 
+/**
+ * @description Fetches a single document through the key-value API so that the
+ * original order of its fields is preserved (query results do not guarantee it)
+ * @param {{
+ * cluster: Cluster;
+ * bucketName: string;
+ * scopeName: string;
+ * collectionName: string;
+ * documentId?: string;
+ * logger: Logger;
+ * }} param0
+ * @returns {Promise<Document|null>}
+ */
+const getCollectionDocumentByDocumentId = async ({
+	cluster,
+	bucketName,
+	scopeName,
+	collectionName,
+	documentId,
+	logger,
+}) => {
+	// An empty collection yields no documents and therefore no document id;
+	// skip the lookup instead of logging a guaranteed "document not found".
+	if (documentId == null) {
+		return null;
+	}
+
+	try {
+		const bucket = cluster.bucket(bucketName);
+		const scope = bucket.scope(scopeName);
+		const collection = scope.collection(collectionName);
+		const { content } = await collection.get(documentId);
+
+		return content;
+	} catch (error) {
+		logger.error(error);
+		return null;
+	}
+};
+
 module.exports = {
 	isBucketHasDefaultCollection,
 	getAllBuckets,
diff --git a/reverse_engineering/helpers/schemaHelper.js b/reverse_engineering/helpers/schemaHelper.js
index 2e9ebaf..e62065a 100644
--- a/reverse_engineering/helpers/schemaHelper.js
+++ b/reverse_engineering/helpers/schemaHelper.js
@@ -2,6 +2,7 @@
  * @typedef {import('../../shared/types').DbCollectionData} DbCollectionData
  * @typedef {import('../../shared/types').Document} Document
  * @typedef {import('../../shared/types').NameMap} NameMap
+ * @typedef {{ active: 'field' | 'alphabetical' }} FieldInference
  */
 const _ = require('lodash');
 const { DEFAULT_KEY_NAME, DEFAULT_NAME } = require('../../shared/constants');
@@ -14,7 +15,10 @@ const { DEFAULT_KEY_NAME, DEFAULT_NAME } = require('../../shared/constants');
  * collectionName: string;
  * documentKind: string;
  * collectionIndexes: object[];
- * includeEmptyCollection: boolean }} param0
+ * includeEmptyCollection: boolean;
+ * standardDocument: Document | null;
+ * fieldInference: FieldInference;
+ * }} param0
  * @returns {DbCollectionData}
  */
 const getDbCollectionData = ({
@@ -25,6 +29,8 @@ const getDbCollectionData = ({
 	documentKind,
 	collectionIndexes,
 	includeEmptyCollection,
+	standardDocument,
+	fieldInference,
 }) => {
 	const jsonDocuments = documents
 		.filter(item => _.isPlainObject(item[bucketName]))
@@ -32,7 +38,8 @@
 			[DEFAULT_KEY_NAME]: item.docid,
 			...item[bucketName],
 		}));
-	const standardDoc = _.first(jsonDocuments);
+	// Optional chaining guards pre-existing callers that omit fieldInference.
+	const standardDoc = fieldInference?.active === 'field' ? standardDocument : null;
 	const emptyBucket = !includeEmptyCollection && _.isEmpty(jsonDocuments);
 
 	return {