Commit 165edb0b authored by Kelvinrr's avatar Kelvinrr
Browse files

cartesian problems

parent 5fd920ab
Loading
Loading
Loading
Loading
+1 −18
Original line number Diff line number Diff line
%% Cell type:code id: tags:

``` python
import os
import sys
sys.path.insert(0, os.path.abspath('..'))

from autocnet.examples import get_path
from autocnet.graph.network import CandidateGraph
from autocnet.graph.edge import Edge
from autocnet.matcher.matcher import FlannMatcher

from autocnet.matcher import subpixel as sp
from scipy.misc import imresize
import math
import warnings
import cv2

from bisect import bisect_left

from scipy.ndimage.interpolation import rotate

from IPython.display import display
warnings.filterwarnings('ignore')

%matplotlib inline
%pylab inline
```

%% Output

    Populating the interactive namespace from numpy and matplotlib

%% Cell type:markdown id: tags:

# Create Basic Structures

%% Cell type:code id: tags:

``` python
# Build the candidate graph from the example three-image adjacency
# description, resolving image paths against the Apollo 15 sample data.
adjacency = get_path('three_image_adjacency.json')
basepath = get_path('Apollo15')
cg = CandidateGraph.from_adjacency(adjacency, basepath=basepath)

# Extract SIFT features (500 per image) on every node of the graph.
cg.extract_features(method='sift', extractor_parameters={'nfeatures':500})

# Match features across every edge of the graph.
cg.match_features()

# Outlier rejection: symmetry (cross-check) test first...
cg.symmetry_checks()
# ...then the ratio test on matches that survived the symmetry check.
cg.ratio_checks(clean_keys = ['symmetry'])
# Estimate a fundamental matrix per edge from the doubly-filtered matches.
cg.compute_fundamental_matrices(clean_keys = ['symmetry', 'ratio'])


# Step: Compute the homographies and apply RANSAC
cg.compute_homographies(clean_keys=['symmetry', 'ratio'])

# Step: Compute the overlap ratio and coverage ratio for each edge
for s, d, edge in cg.edges_iter(data=True):
    edge.coverage_ratio(clean_keys=['symmetry', 'ratio'])

# Step: Compute subpixel offsets for candidate points
# (uses the 'ransac' mask produced by the homography step)
cg.subpixel_register(clean_keys=['ransac'])

# Spatially suppress redundant candidate points from the surviving masks.
cg.suppress(clean_keys=['symmetry', 'ratio', 'subpixel'])
```

%% Cell type:code id: tags:

``` python
# Sanity check: locate the index of value 3 in a small arange-built array.
lis = np.arange(0, 12, 3)
print(lis)

# Use the explicit np.where rather than the bare `where` injected by
# %pylab — the star-import name shadows easily and hides the dependency.
print(np.where(lis == 3)[0][0])
```

%% Output

    [0 3 6 9]
    1

%% Cell type:markdown id: tags:

# Define the Ciratefi Algorithm and Its Helper Functions

%% Cell type:code id: tags:

``` python
from copy import deepcopy


def cartesian_product(arrays, out=None):
    """
    Generate a cartesian product of input arrays.

    Parameters
    ----------
    arrays : list of array-like
        1-D arrays to form the cartesian product of.
    out : ndarray, optional
        Preallocated (M, len(arrays)) array to fill in place; used by the
        recursive calls to avoid reallocating.

    Returns
    -------
    out : ndarray
        2-D array of shape (M, len(arrays)) containing cartesian products
        formed of input arrays, where M is the product of the input sizes.

    Examples
    --------
    >>> cartesian_product(([1, 2], [4, 5]))
    array([[1, 4],
           [1, 5],
           [2, 4],
           [2, 5]])
    """
    arrays = [np.asarray(x) for x in arrays]
    # Output dtype follows the first array; other inputs are cast to it.
    dtype = arrays[0].dtype

    # Total number of combinations.
    n = int(np.prod([x.size for x in arrays]))
    if out is None:
        out = np.zeros([n, len(arrays)], dtype=dtype)

    # Rows per distinct value of the first array.  Must be integer
    # division: the original used `/`, which yields a float in Python 3
    # and breaks the slice arithmetic below.
    m = n // arrays[0].size
    out[:, 0] = np.repeat(arrays[0], m)
    if arrays[1:]:
        # Fill the first stripe recursively.  The original additionally
        # called an undefined `cartesian(...)` on the same slice, which
        # raised NameError; that duplicate call is removed.
        cartesian_product(arrays[1:], out=out[0:m, 1:])
        # Tile the first stripe for the remaining values of the first array.
        for j in range(1, arrays[0].size):
            out[j*m:(j+1)*m, 1:] = out[0:m, 1:]
    return out

def corr_normed(template, search):
    """
    Normalized (Pearson-style) cross-correlation of two 1-D signals.

    The longer input is truncated to the length of the shorter, both are
    mean-centered, and the mean of their element-wise product is divided
    by the product of their population standard deviations.

    Parameters
    ----------
    template, search : array-like
        1-D sequences of samples; may differ in length.

    Returns
    -------
    float
        Correlation coefficient in [-1, 1]; nan/inf (with a runtime
        warning) if either input has zero variance, matching the
        original's element-wise division behavior.
    """
    # Truncate both signals to the common length.
    length = min(len(template), len(search))
    t = np.asarray(template, dtype=float)[:length]
    s = np.asarray(search, dtype=float)[:length]

    # Mean-center both signals (vectorized; the original built Python
    # lists element by element and mixed `np` with `numpy` names).
    t_centered = t - t.mean()
    s_centered = s - s.mean()

    # Mean of the element-wise product over the product of the population
    # standard deviations.
    denom = t.std() * s.std()
    return np.mean(t_centered * s_centered) / denom


def to_polar_coord(shape, center):
    """
    Map every pixel of a 2-D grid to polar coordinates about ``center``.

    Parameters
    ----------
    shape : tuple
        (rows, cols) of the grid.
    center : tuple
        (cy, cx) pixel about which coordinates are computed.

    Returns
    -------
    r2 : ndarray
        Squared radial distance of each pixel from the center (kept
        squared so callers can compare against radius**2 without a sqrt).
    theta : ndarray
        Angle of each pixel wrapped to [0, 2*pi).  NOTE(review): the
        arctan2 arguments are (x-cx, y-cy), i.e. axes swapped relative to
        the usual (y, x) convention — preserved, since the masks built on
        this only need a consistent angular parameterization.
    """
    y, x = np.ogrid[:shape[0], :shape[1]]
    cy, cx = center

    # Squared distance from the center (the radial part of the mapping).
    r2 = (x - cx) * (x - cx) + (y - cy) * (y - cy)

    # Angular part, wrapped to [0, 2*pi).  The original carried dead
    # start/stop-angle machinery (tmin=0, tmax=2*pi with an unreachable
    # tmax < tmin branch); it never affected output and is removed.
    theta = np.arctan2(x - cx, y - cy) % (2 * np.pi)

    return r2, theta

def circ_mask(shape, center, radius):
    """
    Boolean mask selecting the pixels lying exactly on a circle.

    Parameters
    ----------
    shape : tuple
        (rows, cols) of the grid to mask.
    center : tuple
        (cy, cx) circle center.
    radius : int
        Circle radius in pixels.

    Returns
    -------
    ndarray of bool
        True only where the squared distance from ``center`` equals
        ``radius**2`` — a thin ring used for circular sampling.
    """
    squared_dist, angle = to_polar_coord(shape, center)

    # Keep only pixels whose squared distance matches the circle exactly.
    on_ring = squared_dist == radius * radius
    # Angles always fall in [0, 2*pi), so this mask is all-True; kept to
    # mirror the full-sweep selection of the original.
    full_sweep = angle <= 2 * math.pi

    return on_ring * full_sweep

def radial_line_mask(shape, center, radius, alpha=0.19460421, atol=.01):
    """
    Boolean mask selecting pixels along a radial line segment.

    Parameters
    ----------
    shape : tuple
        (rows, cols) of the grid to mask.
    center : tuple
        (cy, cx) origin of the ray.
    radius : int
        Maximum distance from the center to include.
    alpha : float
        Ray angle in radians (same parameterization as
        ``to_polar_coord``).
    atol : float
        Absolute angular tolerance for a pixel to count as on the ray.

    Returns
    -------
    ndarray of bool
        True for pixels within ``radius`` of the center whose angle is
        within ``atol`` of ``alpha``.
    """
    squared_dist, angle = to_polar_coord(shape, center)

    # Inside (or on the edge of) the disc of the given radius...
    within_disc = squared_dist <= radius ** 2
    # ...and angularly close to the requested ray.
    on_ray = np.isclose(angle, alpha, atol=atol)

    return within_disc * on_ray


def ciratefi(template, search_image, upsampling=1, t1=.7, t2=.7,  alpha=math.pi/16,
             scales=[0.5, 0.57, 0.66,  0.76, 0.87, 1.0], radii=list(range(1,12))):
    '''
    Run the Ciratefi matching cascade to locate `template` inside
    `search_image` in a scale- and rotation-tolerant way.

    Three stages: Cifi samples both images on circles of the given `radii`
    and keeps pixels whose circular profiles correlate above `t1`; Rafi
    samples radial lines at multiples of `alpha` and keeps candidates
    above `t2`; Tefi template-matches the survivors over a small
    neighborhood of the best scale/rotation found earlier.

    Parameters
    ----------
    template : ndarray
        2-D template image to find.
    search_image : ndarray
        2-D image to search.
    upsampling : int
        Must be >= 1.  NOTE(review): currently unused — the zoom step that
        would consume it is commented out below.
    t1, t2 : float
        Correlation thresholds for the Cifi and Rafi stages.
    alpha : float
        Angular step (radians) between radial samples.
    scales : list of float
        Candidate template scales tried by Cifi.  NOTE(review): mutable
        default argument — shared across calls if ever mutated.
    radii : list of int
        Candidate sampling-circle radii.  NOTE(review): mutable default.

    Side effects: renders several pylab plots and prints diagnostics.
    Relies on names injected by ``%pylab inline`` (`np`, `pi`, `where`,
    `pylab`, `scatter`, `show`).
    '''
    # check the parameter domains
    if upsampling < 1:
        raise ValueError('Upsampling must be >= 1')

    # NOTE(review): tuple comparison here is lexicographic, and the branch
    # fires when the template is LARGER than the search image while the
    # message says "smaller" — both condition and wording need confirming.
    if template.shape > search_image.shape:
        raise ValueError('Template Image is smaller than Search Image for template of size: {} and search image of size: {}'\
                          .format(template.shape, search_image.shape))


    # Cifi -- Circular Sample on Template
    # template_result[i, j] = mean intensity of the template at scale i on
    # the circle of radius radii[j] (normalized by circumference).
    template_result = np.empty((len(scales), len(radii)))

    for i, s in enumerate(scales):
        scaled_img = imresize(template, s)
        for j, r in enumerate(radii):
            # Generate a circular mask centered on the scaled template.
            a, b = (int(scaled_img.shape[0] / 2),
                    int(scaled_img.shape[1] / 2))

            if r > b or r > a:
                # NOTE(review): dead assignment — `s` is unconditionally
                # overwritten below; presumably this was meant to skip or
                # flag circles that do not fit inside the scaled template.
                s =-1

            mask = circ_mask(scaled_img.shape, (a,b), r)

            # 1 / circumference: converts the ring sum into a mean.
            inv_area = 1 / (2 * math.pi * r)
            s = np.sum(scaled_img[mask]) * inv_area

            # Sentinel for rings with no signal.
            if s == 0:
                s = -1
            template_result[i,j] = s

    # Cifi2 -- Circular Sample on Target Image
    # The same circular profiles, computed at every pixel of the search image.
    search_result = np.empty((search_image.shape[0], search_image.shape[1], len(radii)))

    for i, y in enumerate(range(search_image.shape[0])):
        for j, x in enumerate(range(search_image.shape[1])):
            for k, r in enumerate(radii):
                inv_area = 1 / (2 * math.pi * r)

                mask = circ_mask(search_image.shape, (i,j), r)

                s = np.sum(search_image[mask]) * inv_area

                if s == 0:
                    s = -1
                search_result[i, j, k] = s

    # Perform Normalized Cross-Correlation between template and target image
    # coeffs[y, x] = best correlation over scales; best_scales[y, x] = the
    # scale that achieved it.
    coeffs = np.empty((search_result.shape[0], search_result.shape[1]))

    cefi_results = np.zeros((search_result.shape[0], search_result.shape[1]))  # NOTE(review): never used
    best_scales = np.empty((search_result.shape[0], search_result.shape[1]))

    for y in range(search_result.shape[0]):
        for x in range(search_result.shape[1]):
            scale = 0
            max_coeff = -math.inf
            for i in range(template_result.shape[0]):


                # result = cv2.matchTemplate(template_result[i].astype(np.float32), search_result[y,x].astype(np.float32), method=cv2.TM_CCORR_NORMED)
                # min_corr, max_corr, min_loc, max_loc = cv2.minMaxLoc(result)

                # max_corr = np.corrcoef(template_result[i], search_result[y,x])[0,1]


                max_corr = corr_normed(template_result[i], search_result[y][x])
                if max_corr > max_coeff:
                    max_coeff = max_corr
                    scale = i

            max_coeff  # NOTE(review): no-op expression statement
            coeffs[y,x] = max_coeff
            best_scales[y,x] = scales[scale]

    pylab.imshow(coeffs, interpolation='none')
    pylab.show()

    # Diagnostic: correlation at the geometric center of the search image.
    a, b = (int(search_image.shape[0] / 2),
        int(search_image.shape[1] / 2))

    print('Image Location: ', (a,b))
    print('Correlation at Image Location: ', coeffs[a][b])

    # get first grade candidate points: everything Cifi scored at or above t1
    fg_candidate_points = [(y,x) for (y,x),coeff in np.ndenumerate(coeffs) if coeff>=t1]
    pylab.show()

    if(not fg_candidate_points):
        raise ValueError('First filter Cifi returned empty set.')

    # Rafi 1  -- Get Radial Samples of Template Image
    # `pi` comes from the %pylab namespace.
    alpha_list = np.arange(0, 2*pi, alpha)
    alpha_template_samples = np.zeros(len(alpha_list))
    center_y, center_x = (int(template.shape[0] / 2),
                          int(template.shape[1] / 2))

    # find the largest radius that fits inside the template
    rad_thresh = center_x if center_x <= center_y else center_y

    print(radii)
    print(bisect_left(radii, rad_thresh))

    if(rad_thresh >= max(radii)):
        radius = max(radii)
    else:
        radius = radii[bisect_left(radii, rad_thresh)]

    print('Radius being used: ', radius)

    for i in range(len(alpha_template_samples)):
        # Create Radial Line Mask at this angle
        mask = radial_line_mask(template.shape, (center_y, center_x), radius, alpha=alpha_list[i])

        # Sum the values along the ray, normalized by its length
        alpha_template_samples[i] = np.sum(template[mask])/radius

    # Rafi 2 -- Get Radial Samples of the Search Image for all First Grade Candidate Points
    rafi_coeffs = np.zeros((len(fg_candidate_points), len(alpha_list)))

    for i in range(len(fg_candidate_points)):
        y, x = fg_candidate_points[i]

        # Scale image to the best fit scale found by Cifi
        scaled_img = imresize(search_image, best_scales[y][x])

        for j in range(len(alpha_list)):
            # Create Radial Mask.  NOTE(review): (y, x) index the unscaled
            # image but the mask is built on the scaled shape — confirm the
            # candidate coordinates should not be rescaled too.
            mask = radial_line_mask(scaled_img.shape, (y, x), radius, alpha=alpha_list[j])
            rafi_coeffs[i][j] = np.sum(scaled_img[mask])/radius

    coeffs = np.zeros((search_result.shape[0], search_result.shape[1]))
    best_rotation = []
    sg_candidate_points = []

    # Perform Normalized Cross-Correlation between template and target image:
    # circularly shift the template's radial profile to find the best rotation.
    for i in range(len(fg_candidate_points)):
        maxcoeff = -math.inf
        maxrotate = 0
        for j in range(len(alpha_list)):
            c_shift_RQ = np.roll(alpha_template_samples, j)
            y, x = fg_candidate_points[i]

            # coeff = np.corrcoef(alpha_template_samples, rafi_coeffs[i])[0,1]
            # max_corr = coeff

            # result = cv2.matchTemplate(c_shift_RQ.astype(np.float32), rafi_coeffs[i].astype(np.float32), method=cv2.TM_CCORR_NORMED)
            # min_corr, max_corr, min_loc, max_loc = cv2.minMaxLoc(result)

            max_corr = corr_normed(c_shift_RQ, rafi_coeffs[i])

            if max_corr > maxcoeff:
                maxcoeff = max_corr
                maxrotate = j

        # (y, x) is constant across the inner loop for a given candidate,
        # so using it here is safe while alpha_list is non-empty.
        coeffs[y,x] = maxcoeff
        if(maxcoeff >= t2):
            best_rotation.append(alpha_list[maxrotate])
            sg_candidate_points.append((y,x))


    if(not sg_candidate_points):
        raise ValueError('Second filter Rafi returned empty set.')

    pylab.imshow(coeffs, interpolation='none')
    pylab.show()

    # best_rotation[sg_candidate_points.index((a,b))] = math.pi
    # (a, b) still refers to the search-image center computed above.
    print('Image Location: ', (a,b))
    print('Alphas: ', alpha_list)
    print('Correlation At Image Location: ', coeffs[a][b])

    coeffs = np.zeros((search_image.shape[0], search_image.shape[1]))
    # sg_candidate_points = [(y,x) for (y,x),coeff in np.ndenumerate(coeffs) if coeff>=t2]
    tefi_coeffs = []

    # check for upsampling
    # if upsampling > 1:
    #     template = zoom(template, upsampling, order=1)
    #    search_image = zoom(search_image, upsampling, order=1)

    from autocnet.matcher.matcher import pattern_match  # NOTE(review): unused — only referenced in commented-out code

    # Tefi -- Template Matching Filter
    for i in range(len(sg_candidate_points)):
        y, x = sg_candidate_points[i]
        best_scale_idx = scales.index(best_scales[y,x])
        # `where` comes from the %pylab namespace (np.where).
        best_alpha_idx = (where(alpha_list == best_rotation[i]))[0][0]

        # Try a 3x3 neighborhood (wrapping at the ends) of the best
        # scale/rotation pair found by the earlier stages.
        tefi_scales = np.array(scales).take(range(best_scale_idx-1, best_scale_idx+2), mode='wrap')
        tefi_alphas = alpha_list.take(range(best_alpha_idx-1, best_alpha_idx+2), mode='wrap')
        scalesXalphas = cartesian_product([tefi_scales, tefi_alphas])

        max_coeff = -math.inf

        for j in range(len(scalesXalphas)):
            transformed_template = imresize(template, scalesXalphas[j][0])
            transformed_template = rotate(transformed_template, scalesXalphas[j][1])

            # NOTE(review): true division yields floats in Python 3, which
            # makes the slices below raise TypeError — likely should be //.
            y_window, x_window = (transformed_template.shape[0]/2,
                                  transformed_template.shape[1]/2)

            if(y < y_window or x < x_window):
                coeffs[y][x] = 0
                tefi_coeffs.append(0)
                continue

            # NOTE(review): chained indexing slices rows twice; a single
            # [y-y_window:y+y_window, x-x_window:x+x_window] index is almost
            # certainly intended here.
            cropped_search = search_image[y-y_window:y+y_window][x-x_window:x+x_window]

            # Result = cv2.matchTemplate(transformed_template.astype(np.float32), cropped_search.astype(float32), method=cv2.TM_CCOEFF_NORMED)
            # min_corr, max_corr, min_loc, max_loc = cv2.minMaxLoc(result)

            # print(y,x)
            # score = numpy.correlate(transformed_template[:, 0], cropped_search[:, 0], "full")

            # imshow(result, cmap='Greys')
            # show()

            # xmax, ymax, max_corr = pattern_match(transformed_template, search_image, upsampling=upsampling)

            # print('max at:', max_loc)
            # print('cp: ', (y,x))
            score = corr_normed(transformed_template.flatten(), cropped_search.flatten())

            if(score > max_coeff):
                max_coeff = score

        coeffs[y][x] = max_coeff
        tefi_coeffs.append(max_coeff)

    pylab.imshow(coeffs, interpolation='none')
    pylab.colorbar()
    # Mark the best-scoring candidate (scatter/show from %pylab namespace).
    (y,x) = sg_candidate_points[tefi_coeffs.index(max(tefi_coeffs))]
    scatter(y=[y],x=[x], c='g', s=40)
    show()

    # NOTE(review): this print has no value argument — the trailing comma
    # leaves it printing only the label.
    print('Correlation at Image Location: ', )
    print('Maximum Correlation @ ', (y,x), ' with ', max(tefi_coeffs))
    print(tefi_coeffs)
```

%% Cell type:markdown id: tags:

# Run Ciratefi on a Matched Image Pair

%% Cell type:code id: tags:

``` python
from scipy.ndimage.interpolation import zoom  # NOTE(review): unused in this cell
from scipy.stats.stats import pearsonr  # NOTE(review): unused in this cell

# Pull one edge (images 1-2) out of the graph built earlier and exercise
# ciratefi on its subpixel-registered matches.
figsize(10,10)
e = cg.edge[1][2]
matches = e.matches
clean_keys = ['subpixel']

full_offsets = np.zeros((len(matches), 3))  # NOTE(review): never used below

# Restrict to matches that passed the subpixel mask.
if clean_keys:
    matches, mask = e._clean(clean_keys)

# Preallocate the numpy array to avoid appending and type conversion
edge_offsets = np.empty((len(matches),3))  # NOTE(review): never filled below

# for each edge, calculate this for each keypoint pair
for i, (idx, row) in enumerate(matches.iterrows()):
    s_idx = int(row['source_idx'])
    d_idx = int(row['destination_idx'])
    s_kps = e.source.get_keypoints().iloc[s_idx]
    d_kps = e.destination.get_keypoints().iloc[d_idx]

    # (x, y) pixel coordinates of the matched keypoints on each image.
    s_keypoint = e.source.get_keypoints().iloc[s_idx][['x', 'y']].values
    d_keypoint = e.destination.get_keypoints().iloc[d_idx][['x', 'y']].values

    # Get the template and search windows.  Each chip is deliberately
    # rotated and rescaled so the run exercises ciratefi's rotation- and
    # scale-invariance rather than a trivial identity match.
    s_template = sp.clip_roi(e.source.geodata, s_keypoint, 21)
    s_template = rotate(s_template, 90)
    s_template = imresize(s_template, .87)

    d_search = sp.clip_roi(e.destination.geodata, d_keypoint, 31)
    d_search = rotate(d_search, 180)
    d_search = imresize(d_search, .9)

    # Show the two chips being matched (imshow/show from %pylab inline).
    imshow(s_template, cmap='Greys')
    show()
    imshow(d_search, cmap='Greys')
    show()

    ciratefi(s_template, d_search, upsampling=16, alpha=math.pi/4, t1=.8, t2=.8, radii=list(range(1,12)))
    break  # only exercise the first match pair
```

%% Output




    Image Location:  (13, 13)
    Correlation at Image Location:  0.972442125175
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
    8
    Radius being used:  9


    Image Location:  (13, 13)
    Alphas:  [ 0.          0.78539816  1.57079633  2.35619449  3.14159265  3.92699082
      4.71238898  5.49778714]
    Correlation At Image Location:  0.989644153764


    Correlation at Image Location:
    Maximum Correlation @  (12, 13)  with  0.0443647756405
    [0, -0.14086632084761205, -0.062728330774455152, 0.044364775640478381, 0.0126586032891797, -0.009509691538910698, 0.0126586032891797, -0.009509691538910698, -0.024059954650466012, -0.009509691538910698, -0.024059954650466012, -0.029520603586283511]

%% Cell type:code id: tags:

``` python
```

%% Cell type:code id: tags:

``` python
```

%% Cell type:code id: tags:

``` python
```