Skip to content

Commit

Permalink
pushing test prints #929
Browse files Browse the repository at this point in the history
  • Loading branch information
brunofavs committed Apr 18, 2024
1 parent 20502bf commit 6db4d7a
Show file tree
Hide file tree
Showing 12 changed files with 469 additions and 579 deletions.
15 changes: 11 additions & 4 deletions atom_calibration/scripts/calibrate
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,8 @@ def signal_handler(sig, frame):
# -------------------------------------------------------------------------------
def main():



atomStartupPrint('Starting calibration')

# ---------------------------------------
Expand Down Expand Up @@ -239,7 +241,7 @@ def main():


addNoiseToInitialGuess(dataset, args, selected_collection_key)
addNoiseToJointParameters(dataset, args)
# addNoiseToJointParameters(dataset, args)


# Verify both arguments were provided
Expand All @@ -255,8 +257,9 @@ def main():
# print(translation_tf_noise)
# print(rotation_tf_noise)

for tf_pair in args['noisy_tf_links']:
addNoiseToTF(dataset,selected_collection_key,tf_pair[0],tf_pair[1],translation_tf_noise,rotation_tf_noise)
# for tf_pair in args['noisy_tf_links']:
# addNoiseToTF(dataset,selected_collection_key,tf_pair[0],tf_pair[1],translation_tf_noise,rotation_tf_noise)
# addNoiseToTF(dataset,selected_collection_key,tf_pair[0],tf_pair[1],1,1)
# debugging print
# tf_link = generateKey(tf_pair[0],tf_pair[1])
# from pprint import pprint
Expand Down Expand Up @@ -338,6 +341,7 @@ def main():
# Stemming from the config json, we define a transform to be optimized for each sensor. It could happen that two
# or more sensors define the same transform to be optimized (#120). To cope with this we first create a list of
# transformations to be optimized and then compute the unique set of that list.

print("Creating sensor transformation parameters ...")
sensors_transforms_set = set()
for sensor_key, sensor in dataset["sensors"].items():
Expand Down Expand Up @@ -380,6 +384,7 @@ def main():
transform_key = generateKey(additional_tf['parent_link'], additional_tf['child_link'])
additional_transforms_set.add(transform_key)


if dataset['calibration_config']['additional_tfs'] is not None:
for _, additional_tf in dataset['calibration_config']['additional_tfs'].items():
transform_key = generateKey(additional_tf['parent_link'], additional_tf['child_link'])
Expand Down Expand Up @@ -736,7 +741,9 @@ def main():
opt.computeSparseMatrix()
# opt.printSparseMatrix()
# opt.printParameters()
opt.getParamNames()
# opt.getParamNames()



# ---------------------------------------
# --- Get a normalizer for each residual type
Expand Down
3 changes: 0 additions & 3 deletions atom_core/src/atom_core/config_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,6 @@

def mutually_inclusive_conditions(A,B):

print(A is None)
print(B is None)

if A is not None and B is not None:
return True
elif A is None and B is None:
Expand Down
32 changes: 32 additions & 0 deletions atom_core/src/atom_core/dataset_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -815,6 +815,8 @@ def addNoiseToInitialGuess(dataset, args, selected_collection_key):
if sensor_key != dataset['calibration_config']['anchored_sensor']:
calibration_child = sensor['calibration_child']
calibration_parent = sensor['calibration_parent']
print(calibration_child)
print(calibration_parent)
addNoiseToTF(dataset, selected_collection_key, calibration_parent, calibration_child, nig_trans, nig_rot)


Expand Down Expand Up @@ -853,9 +855,12 @@ def addNoiseToTF(dataset, selected_collection_key, calibration_parent, calibrati

elif dataset['transforms'][transform_key]['type'] == 'multiple':


for collection_key, collection in dataset["collections"].items():

# Get original transformation
tf_gt = copy.deepcopy(dataset['collections'][collection_key]['transforms'][transform_key])

quat = dataset['collections'][collection_key]['transforms'][transform_key]['quat']
translation = dataset['collections'][collection_key]['transforms'][transform_key]['trans']

Expand All @@ -873,6 +878,33 @@ def addNoiseToTF(dataset, selected_collection_key, calibration_parent, calibrati
dataset['collections'][collection_key]['transforms'][transform_key]['quat'] = new_quat
dataset['collections'][collection_key]['transforms'][transform_key]['trans'] = list(new_translation)

print(f'{Fore.BLUE}{type(new_quat)}{Style.RESET_ALL}')
print(f'{Fore.BLUE}{type(list(new_translation))}{Style.RESET_ALL}')

from atom_core.utilities import compareAtomTransforms
from pprint import pprint

et,er = compareAtomTransforms(tf_gt,dataset['collections'][collection_key]['transforms'][transform_key])
# pprint(tf_gt)
# pprint(dataset['collections'][collection_key]['transforms'][transform_key])

print("\n\nTranslation")
# print(f'{Fore.BLUE}{list(np.around(np.array(translation),4))}{Style.RESET_ALL}')
# print(f'{Fore.RED}{list(np.around(np.array(new_translation),4))}{Style.RESET_ALL}')
print(f'{Fore.GREEN}Diference\n{round(et,3)}{Style.RESET_ALL}')



print("\n\nRotation")
# print(f'{Fore.BLUE}{list(np.around(np.array(euler_angles),4))}{Style.RESET_ALL}')
# print(f'{Fore.RED}{list(np.around(np.array(new_angles),4))}{Style.RESET_ALL}')

print(f'{Fore.GREEN}Diference\n{round(er,3)}{Style.RESET_ALL}')



# exit()


def copyTFToDataset(calibration_parent, calibration_child, source_dataset, target_dataset):
"""
Expand Down
2 changes: 1 addition & 1 deletion atom_core/src/atom_core/utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ def printComparisonToGroundTruth(dataset, dataset_initial, dataset_ground_truth,
if dataset['calibration_config']['additional_tfs'] is not None:
for additional_tf_key, additional_tf in dataset['calibration_config']['additional_tfs'].items():

transform_key = generateKey(additional_tf["parent_link"], sensor["child_link"])
transform_key = generateKey(additional_tf["parent_link"], additional_tf["child_link"])
row = [transform_key, Fore.LIGHTCYAN_EX + additional_tf_key + Style.RESET_ALL]

transform_calibrated = dataset['collections'][selected_collection_key]['transforms'][transform_key]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,9 +110,9 @@ additional_tfs:
# This is the transformation (i.e. link) that will be optimized.
#
# EXAMPLE:
#base_footprint_to_base_link:
# parent_link: "base_footprint"
# child_link: "base_link"
world_to_base_footprint:
parent_link: "world"
child_link: "base_footprint"


# ATOM can also calibrate several parameters of your joints.
Expand Down
Binary file modified atom_examples/softbot/softbot_calibration/calibration/summary.pdf
Binary file not shown.
Original file line number Diff line number Diff line change
Expand Up @@ -317,7 +317,7 @@
{
"child": "base_footprint",
"from_bag": true,
"is_transformation_calibrated": false,
"is_transformation_calibrated": true,
"parent": "world",
"source": "world",
"target": "base_footprint",
Expand Down
Loading

0 comments on commit 6db4d7a

Please sign in to comment.